diff --git a/.cargo/config.toml b/.cargo/config.toml index c4cd35e56bc..f1a26708418 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -2,6 +2,7 @@ build-man = "run --package xtask-build-man --" stale-label = "run --package xtask-stale-label --" bump-check = "run --package xtask-bump-check --" +lint-docs = "run --package xtask-lint-docs --" [env] # HACK: Until this is stabilized, `snapbox`s polyfill could get confused diff --git a/.github/renovate.json5 b/.github/renovate.json5 index 50d5e2f0aeb..7fdcf0d5cc4 100644 --- a/.github/renovate.json5 +++ b/.github/renovate.json5 @@ -8,6 +8,8 @@ ignorePaths: [ '**/tests/**', ], + // See rust-lang/cargo#13546 and openssl/openssl#23376 for the exclusion + ignoreDeps: ['openssl', 'openssl-src', 'openssl-sys'], customManagers: [ { customType: 'regex', @@ -78,8 +80,6 @@ matchUpdateTypes: [ 'patch', ], - // See rust-lang/cargo#13546 and openssl/openssl#23376 for the exclusion - excludePackageNames: ['openssl', 'openssl-src', 'openssl-sys'], automerge: false, groupName: 'compatible', }, @@ -91,8 +91,6 @@ matchUpdateTypes: [ 'minor', ], - // See rust-lang/cargo#13546 and openssl/openssl#23376 for the exclusion - excludePackageNames: ['openssl', 'openssl-src', 'openssl-sys'], automerge: false, groupName: 'compatible', }, diff --git a/.github/workflows/audit.yml b/.github/workflows/audit.yml index d903eb0d71d..ea126fc6071 100644 --- a/.github/workflows/audit.yml +++ b/.github/workflows/audit.yml @@ -22,7 +22,7 @@ jobs: - bans licenses sources steps: - uses: actions/checkout@v4 - - uses: EmbarkStudios/cargo-deny-action@v1 + - uses: EmbarkStudios/cargo-deny-action@v2 # Prevent sudden announcement of a new advisory from failing ci: continue-on-error: ${{ matrix.checks == 'advisories' }} with: diff --git a/.github/workflows/contrib.yml b/.github/workflows/contrib.yml index 89b0fa540d1..5756c5136dd 100644 --- a/.github/workflows/contrib.yml +++ b/.github/workflows/contrib.yml @@ -23,7 +23,7 @@ jobs: - name: Install mdbook run: | 
mkdir mdbook - curl -Lf https://github.com/rust-lang/mdBook/releases/download/v0.4.37/mdbook-v0.4.37-x86_64-unknown-linux-gnu.tar.gz | tar -xz --directory=./mdbook + curl -Lf https://github.com/rust-lang/mdBook/releases/download/v0.4.40/mdbook-v0.4.40-x86_64-unknown-linux-gnu.tar.gz | tar -xz --directory=./mdbook echo `pwd`/mdbook >> $GITHUB_PATH - name: Deploy docs run: | diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index e680a32d61f..93140c4705d 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -29,7 +29,7 @@ jobs: - build_std - clippy - msrv - - docs +# - docs - lockfile - resolver - rustfmt @@ -47,7 +47,7 @@ jobs: - build_std - clippy - msrv - - docs +# - docs - lockfile - resolver - rustfmt @@ -72,7 +72,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - run: rustup update stable && rustup default stable + - run: rustup toolchain install 1.79.0 && rustup default 1.79.0 - run: rustup component add clippy - run: cargo clippy --workspace --all-targets --no-deps -- -D warnings @@ -83,6 +83,13 @@ jobs: - run: rustup update stable && rustup default stable - run: cargo stale-label + lint-docs: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - run: rustup update stable && rustup default stable + - run: cargo lint-docs --check + # Ensure Cargo.lock is up-to-date lockfile: runs-on: ubuntu-latest @@ -100,11 +107,11 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - run: rustup update stable && rustup default stable + - run: rustup toolchain install 1.78.0 && rustup default 1.78.0 - name: Install cargo-semver-checks run: | mkdir installed-bins - curl -Lf https://github.com/obi1kenobi/cargo-semver-checks/releases/download/v0.29.0/cargo-semver-checks-x86_64-unknown-linux-gnu.tar.gz \ + curl -Lf https://github.com/obi1kenobi/cargo-semver-checks/releases/download/v0.35.0/cargo-semver-checks-x86_64-unknown-linux-gnu.tar.gz \ | tar -xz --directory=./installed-bins 
echo `pwd`/installed-bins >> $GITHUB_PATH - run: ci/validate-version-bump.sh @@ -123,7 +130,7 @@ jobs: include: - name: Linux x86_64 stable os: ubuntu-latest - rust: stable + rust: 1.79.0 other: i686-unknown-linux-gnu - name: Linux x86_64 beta os: ubuntu-latest @@ -133,26 +140,26 @@ jobs: os: ubuntu-latest rust: nightly other: i686-unknown-linux-gnu - - name: macOS x86_64 stable - os: macos-13 - rust: stable - other: x86_64-apple-ios + - name: macOS aarch64 stable + os: macos-14 + rust: 1.79.0 + other: x86_64-apple-darwin - name: macOS x86_64 nightly os: macos-13 rust: nightly other: x86_64-apple-ios - - name: macOS aarch64 stable + - name: macOS aarch64 nightly os: macos-14 - rust: stable + rust: nightly other: x86_64-apple-darwin - name: Windows x86_64 MSVC stable os: windows-latest - rust: stable-msvc + rust: 1.79.0-msvc other: i686-pc-windows-msvc - - name: Windows x86_64 gnu nightly # runs out of space while trying to link the test suite - os: windows-latest - rust: nightly-gnu - other: i686-pc-windows-gnu +# - name: Windows x86_64 gnu nightly # runs out of space while trying to link the test suite +# os: windows-latest +# rust: nightly-gnu +# other: i686-pc-windows-gnu name: Tests ${{ matrix.name }} steps: - uses: actions/checkout@v4 @@ -217,7 +224,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - run: rustup update --no-self-update stable && rustup default stable + - run: rustup toolchain install 1.79.0 && rustup default 1.79.0 - run: rustup target add i686-unknown-linux-gnu - run: sudo apt update -y && sudo apt install gcc-multilib libsecret-1-0 libsecret-1-dev -y - run: rustup component add rustfmt || echo "rustfmt not available" @@ -229,42 +236,42 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - run: rustup update nightly && rustup default nightly + - run: rustup update stable && rustup default stable - run: rustup component add rust-src - run: cargo build - run: cargo test -p cargo --test build-std env: 
CARGO_RUN_BUILD_STD_TESTS: 1 - docs: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - run: rustup update nightly && rustup default nightly - - run: rustup update stable - - run: rustup component add rust-docs - - run: ci/validate-man.sh - # This requires rustfmt, use stable. - - name: Run semver-check - run: cargo +stable run -p semver-check - - name: Ensure intradoc links are valid - run: cargo doc --workspace --document-private-items --no-deps - env: - RUSTDOCFLAGS: -D warnings - - name: Install mdbook - run: | - mkdir mdbook - curl -Lf https://github.com/rust-lang/mdBook/releases/download/v0.4.37/mdbook-v0.4.37-x86_64-unknown-linux-gnu.tar.gz | tar -xz --directory=./mdbook - echo `pwd`/mdbook >> $GITHUB_PATH - - run: cd src/doc && mdbook build --dest-dir ../../target/doc - - name: Run linkchecker.sh - run: | - cd target - curl -sSLO https://raw.githubusercontent.com/rust-lang/rust/master/src/tools/linkchecker/linkcheck.sh - sh linkcheck.sh --all --path ../src/doc cargo +# docs: +# runs-on: ubuntu-latest +# steps: +# - uses: actions/checkout@v4 +# - run: rustup update nightly && rustup default nightly +# - run: rustup update stable +# - run: rustup component add rust-docs +# - run: ci/validate-man.sh +# # This requires rustfmt, use stable. 
+# - name: Run semver-check +# run: cargo +stable run -p semver-check +# - name: Ensure intradoc links are valid +# run: cargo doc --workspace --document-private-items --no-deps +# env: +# RUSTDOCFLAGS: -D warnings +# - name: Install mdbook +# run: | +# mkdir mdbook +# curl -Lf https://github.com/rust-lang/mdBook/releases/download/v0.4.37/mdbook-v0.4.37-x86_64-unknown-linux-gnu.tar.gz | tar -xz --directory=./mdbook +# echo `pwd`/mdbook >> $GITHUB_PATH +# - run: cd src/doc && mdbook build --dest-dir ../../target/doc +# - name: Run linkchecker.sh +# run: | +# cd target +# curl -sSLO https://raw.githubusercontent.com/rust-lang/rust/master/src/tools/linkchecker/linkcheck.sh +# sh linkcheck.sh --all --path ../src/doc cargo msrv: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: taiki-e/install-action@cargo-hack - - run: cargo hack check --all-targets --rust-version --workspace --ignore-private --locked + - run: cargo hack check --all-targets --workspace --ignore-private --locked diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 00000000000..fd35c9b86e5 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,33 @@ +# Publish Cargo to crates.io whenever a new tag is pushed. Tags are pushed by +# the Rust release process (https://github.com/rust-lang/promote-release), +# which will cause this workflow to run. + +name: Release +on: + push: + tags: + - "**" + +# Prevent multiple releases from starting at the same time. +concurrency: + group: release + +jobs: + crates-io: + name: Publish on crates.io + runs-on: ubuntu-latest + permissions: + contents: read + + # Gain access to the crates.io publishing token. 
+ environment: + name: release + + steps: + - name: Checkout the source code + uses: actions/checkout@v4 + + - name: Publish Cargo to crates.io + run: ./publish.py + env: + CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} diff --git a/.gitignore b/.gitignore index 1cd9ebdfa4f..e10c0128f11 100644 --- a/.gitignore +++ b/.gitignore @@ -1,11 +1,11 @@ -target -Cargo.lock +/target +/Cargo.lock /config.stamp /Makefile /config.mk -src/doc/build -src/etc/*.pyc -src/registry/target +/src/doc/build +/src/etc/*.pyc +/src/registry/target rustc __pycache__ .idea/ diff --git a/CHANGELOG.md b/CHANGELOG.md index bb0ea94fb5c..90101de51d0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,19 +1,693 @@ # Changelog -## Cargo 1.79 (2024-06-13) -[2fe739fc...HEAD](https://github.com/rust-lang/cargo/compare/2fe739fc...HEAD) +## Cargo 1.83 (2024-11-28) +[8f40fc59...HEAD](https://github.com/rust-lang/cargo/compare/8f40fc59...HEAD) + +### Added + +### Changed + +- Enhanced the missing feature error message. + [#14436](https://github.com/rust-lang/cargo/pull/14436) +- Enhanced the dependency update status message, including + - Reports incompatible packages with precise Rust version + [#14457](https://github.com/rust-lang/cargo/pull/14457) + - Reports incompatible-with-rustc when MSRV-resolver is disabled + [#14459](https://github.com/rust-lang/cargo/pull/14459) + - Reports compatible, incompatible, and direct-dep updates with different + colors and messages. + [#14461](https://github.com/rust-lang/cargo/pull/14461) + - The `Locking` status message stops showing workspace members. 
+ [#14445](https://github.com/rust-lang/cargo/pull/14445) +- Log details of `rustc` invocation failure if no errors are seen + [#14453](https://github.com/rust-lang/cargo/pull/14453) +- cargo-package: Don't automatically include the current crate when packaging + [#14488](https://github.com/rust-lang/cargo/pull/14488) + +### Fixed + +- cargo-add: Perform fuzzy search when translating package names + [#13765](https://github.com/rust-lang/cargo/pull/13765) + +### Nightly only + +- cargo-update: Add `matches_prerelease` semantic + [#14305](https://github.com/rust-lang/cargo/pull/14305) +- `open-namespaces`: Allow open namespaces in `PackageIdSpec`s + [#14467](https://github.com/rust-lang/cargo/pull/14467) + +### Documentation + +### Internal + +- Updated to `pasetors` 0.7.0 + [#14478](https://github.com/rust-lang/cargo/pull/14478) +- cargo-update: Prepare for smarter update messages + [#14440](https://github.com/rust-lang/cargo/pull/14440) + +## Cargo 1.82 (2024-10-17) +[a2b58c3d...rust-1.82.0](https://github.com/rust-lang/cargo/compare/a2b58c3d...rust-1.82.0) + +### Added + +- πŸŽ‰ Added `cargo info` command for displaying information about a package. + [docs](https://doc.rust-lang.org/nightly/cargo/commands/cargo-info.html) + [#14141](https://github.com/rust-lang/cargo/pull/14141) + [#14418](https://github.com/rust-lang/cargo/pull/14418) + [#14430](https://github.com/rust-lang/cargo/pull/14430) + +### Changed + +- ❗️ Doctest respects Cargo's color options by passing `--color` to rustdoc invocations. + [#14425](https://github.com/rust-lang/cargo/pull/14425) +- Improved error message for missing both `[package]` and `[workspace]` in Cargo.toml. + [#14261](https://github.com/rust-lang/cargo/pull/14261) +- Enumerate all possible values of `profile.*.debug` for the error message. + [#14413](https://github.com/rust-lang/cargo/pull/14413) + +### Fixed + +- Use longhand gitoxide path-spec patterns. 
Previously the implementation used + shorthand pathspecs, which could produce invalid syntax, for example, if the + path to the manifest file contained a leading `_` underscore + [#14380](https://github.com/rust-lang/cargo/pull/14380) +- cargo-package: fix failures on bare commit git repo. + [#14359](https://github.com/rust-lang/cargo/pull/14359) +- cargo-publish: Don't strip non-dev features for renamed dependencies from the + HTTP JSON body sent to the registry. + The bug only affected third-party registries. + [#14325](https://github.com/rust-lang/cargo/pull/14325) + [#14327](https://github.com/rust-lang/cargo/pull/14327) +- cargo-vendor: don't copy source files of excluded Cargo targets when vendoring. + [#14367](https://github.com/rust-lang/cargo/pull/14367) + +### Nightly only + +- πŸ”₯ `lockfile-path`: Added `--lockfile-path` flag that allows specifying a path + to the lockfile other than the default path `/Cargo.lock`. + ([docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#lockfile-path)) + [#14326](https://github.com/rust-lang/cargo/pull/14326) + [#14417](https://github.com/rust-lang/cargo/pull/14417) + [#14423](https://github.com/rust-lang/cargo/pull/14423) + [#14424](https://github.com/rust-lang/cargo/pull/14424) +- πŸ”₯ `path-bases`: Introduced a table of path "bases" in Cargo configuration files + that can be used to prefix the paths of path dependencies and patch entries. + ([RFC 3529](https://github.com/rust-lang/rfcs/blob/master/text/3529-cargo-path-bases.md)) + ([docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#path-bases)) + [#14360](https://github.com/rust-lang/cargo/pull/14360) +- πŸ”₯ `-Zpackage-workspace`: Enhanced the experience of `cargo package --workspace` + when there are dependencies between crates in the workspace. + Crates in a workspace are no longer required to publish to actual registries. 
+ This is a step toward supporting `cargo publish --workspace`. + [#13947](https://github.com/rust-lang/cargo/pull/13947) + [#14408](https://github.com/rust-lang/cargo/pull/14408) + [#14340](https://github.com/rust-lang/cargo/pull/14340) +- cargo-update: Limit pre-release match semantics to use only on `OptVersionReq::Req` + [#14412](https://github.com/rust-lang/cargo/pull/14412) +- `edition2024`: Revert "fix: Ensure dep/feature activates the dependency on 2024". + [#14295](https://github.com/rust-lang/cargo/pull/14295) +- `update-breaking`: Improved error message when `update --breaking` has an invalid spec + [#14279](https://github.com/rust-lang/cargo/pull/14279) +- `update-breaking`: Don’t downgrade on prerelease `VersionReq` when updating with `--breaking` + [#14250](https://github.com/rust-lang/cargo/pull/14250) +- `-Zbuild-std`: remove hack on creating virtual std workspace + [#14358](https://github.com/rust-lang/cargo/pull/14358) + [#14370](https://github.com/rust-lang/cargo/pull/14370) +- `-Zmsrv-policy`: Adjust MSRV resolve config field name / values. + The previous placeholder `resolver.something-like-precedence` + is now renamed to `resolver.incompatible-rust-versions`. + [#14296](https://github.com/rust-lang/cargo/pull/14296) +- `-Zmsrv-policy`: Report when incompatible-rust-version packages are selected + [#14401](https://github.com/rust-lang/cargo/pull/14401) +- `-Ztarget-applies-to-host`: Fixed passing of links-overrides with + target-applies-to-host and an implicit target + [#14205](https://github.com/rust-lang/cargo/pull/14205) +- `-Ztarget-applies-to-host`: `-Cmetadata` includes whether extra rustflags is same as host + [#14432](https://github.com/rust-lang/cargo/pull/14432) +- `-Ztrim-paths`: rustdoc supports trim-paths for diagnostics + [#14389](https://github.com/rust-lang/cargo/pull/14389) + +### Documentation + +- Convert comments to doc comments for `Workspace`.
+ [#14397](https://github.com/rust-lang/cargo/pull/14397) +- Fix MSRV indicator for `workspace.package` and `workspace.dependencies`. + [#14400](https://github.com/rust-lang/cargo/pull/14400) +- FAQ: remove outdated Cargo offline usage section. + [#14336](https://github.com/rust-lang/cargo/pull/14336) + +### Internal + +- Enhanced `cargo-test-support` usability and documentation. + [#14266](https://github.com/rust-lang/cargo/pull/14266) + [#14268](https://github.com/rust-lang/cargo/pull/14268) + [#14269](https://github.com/rust-lang/cargo/pull/14269) + [#14270](https://github.com/rust-lang/cargo/pull/14270) + [#14272](https://github.com/rust-lang/cargo/pull/14272) +- Made summary sync by using Arc instead of Rc + [#14260](https://github.com/rust-lang/cargo/pull/14260) +- Used `Rc` instead of `Arc` for storing rustflags + [#14273](https://github.com/rust-lang/cargo/pull/14273) +- Removed rustc probe for `--check-cfg` support + [#14302](https://github.com/rust-lang/cargo/pull/14302) +- Renamed 'resolved' to 'normalized' for all manifest normalization related items. 
+ [#14342](https://github.com/rust-lang/cargo/pull/14342) +- cargo-util-schemas: Added `TomlPackage::new`, `Default` for `TomlWorkspace` + [#14271](https://github.com/rust-lang/cargo/pull/14271) +- ci: Switch macos aarch64 to nightly + [#14382](https://github.com/rust-lang/cargo/pull/14382) +- mdman: Normalize newlines when rendering options + [#14428](https://github.com/rust-lang/cargo/pull/14428) +- perf: dont call wrap in a no-op `source_id::with*` + [#14318](https://github.com/rust-lang/cargo/pull/14318) +- test: Migrated more tests to snapbox + [#14242](https://github.com/rust-lang/cargo/pull/14242) + [#14244](https://github.com/rust-lang/cargo/pull/14244) + [#14293](https://github.com/rust-lang/cargo/pull/14293) + [#14297](https://github.com/rust-lang/cargo/pull/14297) + [#14319](https://github.com/rust-lang/cargo/pull/14319) + [#14402](https://github.com/rust-lang/cargo/pull/14402) + [#14410](https://github.com/rust-lang/cargo/pull/14410) +- test: don't rely on absence of `RUST_BACKTRACE` + [#14441](https://github.com/rust-lang/cargo/pull/14441) +- test: Use gmake on AIX + [#14323](https://github.com/rust-lang/cargo/pull/14323) +- Updated to `gix` 0.64.0 + [#14332](https://github.com/rust-lang/cargo/pull/14332) +- Updated to `rusqlite` 0.32.0 + [#14334](https://github.com/rust-lang/cargo/pull/14334) +- Updated to `windows-sys` 0.59 + [#14335](https://github.com/rust-lang/cargo/pull/14335) +- Update dependencies. 
+ [#14299](https://github.com/rust-lang/cargo/pull/14299) + [#14303](https://github.com/rust-lang/cargo/pull/14303) + [#14324](https://github.com/rust-lang/cargo/pull/14324) + [#14329](https://github.com/rust-lang/cargo/pull/14329) + [#14331](https://github.com/rust-lang/cargo/pull/14331) + [#14391](https://github.com/rust-lang/cargo/pull/14391) + +## Cargo 1.81 (2024-09-05) +[34a6a87d...rust-1.81.0](https://github.com/rust-lang/cargo/compare/34a6a87d...rust-1.81.0) ### Added ### Changed +- ❗️ cargo-package: Disallow `package.license-file` and `package.readme` pointing + to non-existent files during packaging. +- ❗️ cargo-package: generated `.cargo_vcs_info.json` is always included, + even when `--allow-dirty` is passed. + [#13960](https://github.com/rust-lang/cargo/pull/13960) +- ❗️ Disallow passing `--release`/`--debug` flag along with the `--profile` flag. + [#13971](https://github.com/rust-lang/cargo/pull/13971) + [#13921](https://github.com/rust-lang/cargo/pull/13921) +- ❗️ Remove `lib.plugin` key support in Cargo.toml. + Rust plugin support has been deprecated for four years and was removed in 1.75.0. + [#13902](https://github.com/rust-lang/cargo/pull/13902) + [#14038](https://github.com/rust-lang/cargo/pull/14038) +- Make the calculation of `-Cmetadata` for rustc consistent across platforms. + [#14107](https://github.com/rust-lang/cargo/pull/14107) +- Emit a warning when `edition` is unset, even when MSRV is unset. + [#14110](https://github.com/rust-lang/cargo/pull/14110) + ### Fixed -- Replace dashes with underscores also if `lib.name` is inferred from `package.name`. +- Fix a proc-macro example from a dependency affecting feature resolution. + [#13892](https://github.com/rust-lang/cargo/pull/13892) +- Don't warn on duplicate packages from using '..'. + [#14234](https://github.com/rust-lang/cargo/pull/14234) +- Don't `du` on every git source load. 
+ [#14252](https://github.com/rust-lang/cargo/pull/14252) +- Don't warn about unreferenced duplicate packages + [#14239](https://github.com/rust-lang/cargo/pull/14239) +- cargo-publish: Don't strip non-dev features for renamed dependencies from the + HTTP JSON body sent to the registry. + The bug only affected third-party registries. + [#14328](https://github.com/rust-lang/cargo/pull/14328) +- cargo-vendor: don't copy source files of excluded Cargo targets when vendoring. + [#14368](https://github.com/rust-lang/cargo/pull/14368) + +### Nightly only + +- πŸ”₯ `update-breaking`: Add `--breaking` to `cargo update`, + allowing upgrading dependencies to breaking versions. + [docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#update-breaking) + [#13979](https://github.com/rust-lang/cargo/pull/13979) + [#14047](https://github.com/rust-lang/cargo/pull/14047) + [#14049](https://github.com/rust-lang/cargo/pull/14049) +- `--artifact-dir`: Rename `--out-dir` to `--artifact-dir`. + The `--out-dir` flag is kept for compatibility + and may be removed when the feature gets stabilized. + [#13809](https://github.com/rust-lang/cargo/pull/13809) +- `edition2024`: Ensure unused optional dependencies fire for shadowed dependencies. + [#14028](https://github.com/rust-lang/cargo/pull/14028) +- `edition2024`: Address problems with implicit -> explicit feature migration + [#14018](https://github.com/rust-lang/cargo/pull/14018) +- `-Zcargo-lints`: Add `unknown_lints` to lints list. + [#14024](https://github.com/rust-lang/cargo/pull/14024) +- `-Zcargo-lints`: Add tooling to document lints. + [#14025](https://github.com/rust-lang/cargo/pull/14025) +- `-Zcargo-lints`: Keep lints updated and sorted. + [#14030](https://github.com/rust-lang/cargo/pull/14030) +- `-Zconfig-include`: Allow enabling `config-include` feature in config. 
+ [#14196](https://github.com/rust-lang/cargo/pull/14196) +- `-Zpublic-dependency`: remove some legacy public dependency code from the resolver + [#14090](https://github.com/rust-lang/cargo/pull/14090) +- `-Ztarget-applies-to-host`: Pass rustflags to artifacts built with implicit targets when using target-applies-to-host + [#13900](https://github.com/rust-lang/cargo/pull/13900) + [#14201](https://github.com/rust-lang/cargo/pull/14201) +- cargo-update: Track the behavior of `--precise`. + [#14013](https://github.com/rust-lang/cargo/pull/14013) + +### Documentation + +- Clarify `CARGO_CFG_TARGET_FAMILY` is multi-valued. + [#14165](https://github.com/rust-lang/cargo/pull/14165) +- Document `CARGO_CFG_TARGET_ABI` + [#14164](https://github.com/rust-lang/cargo/pull/14164) +- Document MSRV for each manifest field and build script invocations. + [#14224](https://github.com/rust-lang/cargo/pull/14224) +- Remove duplicate `strip` section. + [#14146](https://github.com/rust-lang/cargo/pull/14146) +- Update summary of Cargo configuration to include missing keys. + [#14145](https://github.com/rust-lang/cargo/pull/14145) +- Update index of Cargo documentation. + [#14228](https://github.com/rust-lang/cargo/pull/14228) +- Don't mention non-existent `workspace.badges` field. + [#14042](https://github.com/rust-lang/cargo/pull/14042) +- contrib: Suggest atomic commits with separate test commits. + [#14014](https://github.com/rust-lang/cargo/pull/14014) +- contrib: Document how to write an RFC for Cargo. + [#14222](https://github.com/rust-lang/cargo/pull/14222) +- contrib: Improve triage instructions + [#14052](https://github.com/rust-lang/cargo/pull/14052) + +### Internal + +- cargo-package: Change verification order during packaging.
+ [#14074](https://github.com/rust-lang/cargo/pull/14074) +- ci: Add workflow to publish Cargo automatically + [#14202](https://github.com/rust-lang/cargo/pull/14202) +- ci: bump CI tools + [#14062](https://github.com/rust-lang/cargo/pull/14062) + [#14257](https://github.com/rust-lang/cargo/pull/14257) +- registry: Add local registry overlays. + [#13926](https://github.com/rust-lang/cargo/pull/13926) +- registry: move `get_source_id` out of registry + [#14218](https://github.com/rust-lang/cargo/pull/14218) +- resolver: Simplify checking for dependency cycles + [#14089](https://github.com/rust-lang/cargo/pull/14089) +- rustfix: Add `CodeFix::apply_solution` and impl `Clone` + [#14092](https://github.com/rust-lang/cargo/pull/14092) +- source: Clean up after `PathSource`/`RecursivePathSource` split + [#14169](https://github.com/rust-lang/cargo/pull/14169) + [#14231](https://github.com/rust-lang/cargo/pull/14231) +- Remove the temporary `__CARGO_GITOXIDE_DISABLE_LIST_FILES` environment variable. + [#14036](https://github.com/rust-lang/cargo/pull/14036) +- Simplify checking feature syntax + [#14106](https://github.com/rust-lang/cargo/pull/14106) +- Don't make new constant `InternedString` in hot path + [#14211](https://github.com/rust-lang/cargo/pull/14211) +- Use `std::fs::absolute` instead of reimplementing it + [#14075](https://github.com/rust-lang/cargo/pull/14075) +- Remove unnecessary feature activations from cargo. + [#14122](https://github.com/rust-lang/cargo/pull/14122) + [#14160](https://github.com/rust-lang/cargo/pull/14160) +- Revert #13630 as rustc ignores `-C strip` on MSVC.
+ [#14061](https://github.com/rust-lang/cargo/pull/14061) +- test: Allow `unexpected_builtin_cfgs` lint in `user_specific_cfgs` test + [#14153](https://github.com/rust-lang/cargo/pull/14153) +- test: Add cargo_test to test-support prelude + [#14243](https://github.com/rust-lang/cargo/pull/14243) +- test: migrate Cargo testsuite to `snapbox`. + For the complete list of migration pull requests, + see [#14039](https://github.com/rust-lang/cargo/issues/14039#issuecomment-2158974033) +- Updated to `gix` 0.64.0 + [#14431](https://github.com/rust-lang/cargo/pull/14431) +- Update dependencies. + [#13995](https://github.com/rust-lang/cargo/pull/13995) + [#13998](https://github.com/rust-lang/cargo/pull/13998) + [#14037](https://github.com/rust-lang/cargo/pull/14037) + [#14063](https://github.com/rust-lang/cargo/pull/14063) + [#14067](https://github.com/rust-lang/cargo/pull/14067) + [#14174](https://github.com/rust-lang/cargo/pull/14174) + [#14186](https://github.com/rust-lang/cargo/pull/14186) + [#14254](https://github.com/rust-lang/cargo/pull/14254) + +## Cargo 1.80 (2024-07-25) +[b60a1555...rust-1.80.0](https://github.com/rust-lang/cargo/compare/b60a1555...rust-1.80.0) + +### Added + +- πŸŽ‰ Stabilize `-Zcheck-cfg`! This by default enables rustc's checking of + conditional compilation at compile time, which verifies that the crate is + correctly handling conditional compilation for different target platforms or + features. Internally, cargo will be passing a new command line option + `--check-cfg` to all rustc and rustdoc invocations. + + A new build script invocation + [`cargo::rustc-check-cfg=CHECK_CFG`](https://doc.rust-lang.org/nightly/cargo/reference/build-scripts.html#rustc-check-cfg) + is added along with this stabilization, as a way to add custom cfgs to the + list of expected cfg names and values. 
+ + If a build script is not an option for your package, Cargo provides a config + [`[lints.rust.unexpected_cfgs.check-cfg]`](https://doc.rust-lang.org/nightly/rustc/check-cfg/cargo-specifics.html#check-cfg-in-lintsrust-table) + to add known custom cfgs statically. + + ([RFC 3013](https://github.com/rust-lang/rfcs/blob/master/text/3013-conditional-compilation-checking.md)) + ([docs](https://doc.rust-lang.org/nightly/rustc/check-cfg/cargo-specifics.html)) + [#13571](https://github.com/rust-lang/cargo/pull/13571) + [#13865](https://github.com/rust-lang/cargo/pull/13865) + [#13869](https://github.com/rust-lang/cargo/pull/13869) + [#13884](https://github.com/rust-lang/cargo/pull/13884) + [#13913](https://github.com/rust-lang/cargo/pull/13913) + [#13937](https://github.com/rust-lang/cargo/pull/13937) + [#13958](https://github.com/rust-lang/cargo/pull/13958) + +- πŸŽ‰ cargo-update: Allows `--precise` to specify a yanked version of a package, + and will update the lockfile accordingly. + [#13974](https://github.com/rust-lang/cargo/pull/13974) + +### Changed + +- ❗️ manifest: Disallow `[badges]` to inherit from `[workspace.package.badges]`. + This was considered a bug. + Keep in mind that `[badges]` is effectively deprecated. + [#13788](https://github.com/rust-lang/cargo/pull/13788) +- build-script: Suggest old syntax based on MSRV. + [#13874](https://github.com/rust-lang/cargo/pull/13874) +- cargo-add: Avoid escaping double quotes by using string literals. + [#14006](https://github.com/rust-lang/cargo/pull/14006) +- cargo-clean: Performance improvements for cleaning specific packages via `-p` flag. + [#13818](https://github.com/rust-lang/cargo/pull/13818) +- cargo-new: Use `i32` rather than `usize` as the "default integer" in library template. + [#13939](https://github.com/rust-lang/cargo/pull/13939) +- cargo-package: Warn, rather than fail, if a Cargo target is excluded during packaging. 
+ [#13713](https://github.com/rust-lang/cargo/pull/13713) +- manifest: Warn, not error, on unsupported lint tool in the `[lints]` table. + [#13833](https://github.com/rust-lang/cargo/pull/13833) +- perf: Avoid inferring when Cargo targets are known. + [#13849](https://github.com/rust-lang/cargo/pull/13849) +- Populate git information when building Cargo from Rust's source tarball. + [#13832](https://github.com/rust-lang/cargo/pull/13832) +- Improve the error message when deserializing Cargo configuration from partial environment variables. + [#13956](https://github.com/rust-lang/cargo/pull/13956) + +### Fixed + +- resolver: Make path dependencies with the same name stay locked. + [#13572](https://github.com/rust-lang/cargo/pull/13572) +- cargo-add: Preserve file permissions on Unix during `write_atomic`. + [#13898](https://github.com/rust-lang/cargo/pull/13898) +- cargo-clean: Remove symlink directory on Windows. + [#13910](https://github.com/rust-lang/cargo/pull/13910) +- cargo-fix: Don't fix into the standard library. + [#13792](https://github.com/rust-lang/cargo/pull/13792) +- cargo-fix: Support IPv6-only networks. + [#13907](https://github.com/rust-lang/cargo/pull/13907) +- cargo-new: Don't say we're adding to a workspace when a regular package is in the root. + [#13987](https://github.com/rust-lang/cargo/pull/13987) +- cargo-vendor: Silence the warning about forgetting the vendoring. + [#13886](https://github.com/rust-lang/cargo/pull/13886) +- cargo-publish/cargo-vendor: Ensure targets in generated Cargo.toml are in a deterministic order. + [#13989](https://github.com/rust-lang/cargo/pull/13989) + [#14004](https://github.com/rust-lang/cargo/pull/14004) +- cargo-credential-libsecret: Load `libsecret` by its `SONAME`, `libsecret-1.so.0`. + [#13927](https://github.com/rust-lang/cargo/pull/13927) +- Don't panic when an alias doesn't include a subcommand. 
+ [#13819](https://github.com/rust-lang/cargo/pull/13819) +- Workaround copying file returning EAGAIN on ZFS on macOS. + [#13845](https://github.com/rust-lang/cargo/pull/13845) +- Fetch specific commits even if the GitHub fast path fails. + [#13946](https://github.com/rust-lang/cargo/pull/13946) + [#13969](https://github.com/rust-lang/cargo/pull/13969) +- Distinguish Cargo config from different environment variables that share the same prefix. + [#14000](https://github.com/rust-lang/cargo/pull/14000) + +### Nightly only + +- `-Zcargo-lints`: Don't always inherit workspace lints. + [#13812](https://github.com/rust-lang/cargo/pull/13812) +- `-Zcargo-lints`: Add a test to ensure cap-lints works. + [#13829](https://github.com/rust-lang/cargo/pull/13829) +- `-Zcargo-lints`: Error when unstable lints are specified but not enabled. + [#13805](https://github.com/rust-lang/cargo/pull/13805) +- `-Zcargo-lints`: Add cargo-lints to unstable docs. + [#13881](https://github.com/rust-lang/cargo/pull/13881) +- `-Zcargo-lints`: Refactor cargo lint tests. + [#13880](https://github.com/rust-lang/cargo/pull/13880) +- `-Zcargo-lints`: Remove ability to specify `-` in lint name. + [#13837](https://github.com/rust-lang/cargo/pull/13837) +- `-Zscript`: Remove unstable rejected frontmatter syntax for cargo script. + The only allowed frontmatter syntax now is `---`. + [#13861](https://github.com/rust-lang/cargo/pull/13861) + [#13893](https://github.com/rust-lang/cargo/pull/13893) +- `-Zbindeps`: Build only the specified artifact library when multiple types are available. + [#13842](https://github.com/rust-lang/cargo/pull/13842) +- `-Zmsrv-policy`: Treat unset MSRV as compatible. + [#13791](https://github.com/rust-lang/cargo/pull/13791) +- `-Zgit`/`-Zgitoxide`: Default configuration to be obtained from both environment variables and Cargo configuration. 
+ [#13687](https://github.com/rust-lang/cargo/pull/13687) +- `-Zpublic-dependency`: Don't lose 'public' when inheriting a dependency. + [#13836](https://github.com/rust-lang/cargo/pull/13836) +- `edition2024`: Disallow ignored `default-features` when inheriting. + [#13839](https://github.com/rust-lang/cargo/pull/13839) +- `edition2024`: Validate crate-types/proc-macro for bin like other Cargo targets. + [#13841](https://github.com/rust-lang/cargo/pull/13841) + +### Documentation + +- cargo-package: Clarify no guarantee of VCS provenance. + [#13984](https://github.com/rust-lang/cargo/pull/13984) +- cargo-metadata: Clarify dash replacement rule in Cargo target names. + [#13887](https://github.com/rust-lang/cargo/pull/13887) +- config: Fix wrong type of `rustc-flags` in build script overrides. + [#13957](https://github.com/rust-lang/cargo/pull/13957) +- resolver: Add README for `resolver-tests`. + [#13977](https://github.com/rust-lang/cargo/pull/13977) +- contrib: Update UI example code in contributor guide. + [#13864](https://github.com/rust-lang/cargo/pull/13864) +- Fix libcurl proxy documentation link. + [#13990](https://github.com/rust-lang/cargo/pull/13990) +- Add missing `CARGO_MAKEFLAGS` env for plugins. + [#13872](https://github.com/rust-lang/cargo/pull/13872) +- Include CircleCI reference in the Continuous Integration chapter. + [#13850](https://github.com/rust-lang/cargo/pull/13850) + +### Internal + +- ci: Don't check `cargo` against beta channel. + [#13827](https://github.com/rust-lang/cargo/pull/13827) +- test: Set safe.directory for git repo in apache container. + [#13920](https://github.com/rust-lang/cargo/pull/13920) +- test: Silence warnings running embedded unittests. + [#13929](https://github.com/rust-lang/cargo/pull/13929) +- test: Update test formatting due to nightly rustc changes. 
+ [#13890](https://github.com/rust-lang/cargo/pull/13890) + [#13901](https://github.com/rust-lang/cargo/pull/13901) + [#13964](https://github.com/rust-lang/cargo/pull/13964) +- test: Make `git::use_the_cli` test truly locale independent. + [#13935](https://github.com/rust-lang/cargo/pull/13935) +- cargo-test-support: Transition direct assertions from cargo-test-support to snapbox. + [#13980](https://github.com/rust-lang/cargo/pull/13980) +- cargo-test-support: Auto-redact elapsed time. + [#13973](https://github.com/rust-lang/cargo/pull/13973) +- cargo-test-support: Clean up unnecessary uses of `match_exact`. + [#13879](https://github.com/rust-lang/cargo/pull/13879) +- Split `RecursivePathSource` out of `PathSource`. + [#13993](https://github.com/rust-lang/cargo/pull/13993) +- Adjust custom errors from cert-check due to libgit2 1.8 change. + [#13970](https://github.com/rust-lang/cargo/pull/13970) +- Move diagnostic printing to Shell. + [#13813](https://github.com/rust-lang/cargo/pull/13813) +- Update dependencies. + [#13834](https://github.com/rust-lang/cargo/pull/13834) + [#13840](https://github.com/rust-lang/cargo/pull/13840) + [#13948](https://github.com/rust-lang/cargo/pull/13948) + [#13963](https://github.com/rust-lang/cargo/pull/13963) + [#13976](https://github.com/rust-lang/cargo/pull/13976) + +## Cargo 1.79 (2024-06-13) +[2fe739fc...rust-1.79.0](https://github.com/rust-lang/cargo/compare/2fe739fc...rust-1.79.0) + +### Added + +- πŸŽ‰ `cargo add` respects `package.rust-version` a.k.a. MSRV when adding new + dependencies. The behavior can be overridden by specifying a version requirement, + or passing the `--ignore-rust-version` flag. + ([RFC 3537](https://github.com/rust-lang/rfcs/blob/master/text/3537-msrv-resolver.md)) + [#13608](https://github.com/rust-lang/cargo/pull/13608) +- A new `Locking` status message shows dependency changes on any command. 
+ For `cargo update`, it also tells you if any dependency version is outdated. + [#13561](https://github.com/rust-lang/cargo/pull/13561) + [#13647](https://github.com/rust-lang/cargo/pull/13647) + [#13651](https://github.com/rust-lang/cargo/pull/13651) + [#13657](https://github.com/rust-lang/cargo/pull/13657) + [#13759](https://github.com/rust-lang/cargo/pull/13759) + [#13764](https://github.com/rust-lang/cargo/pull/13764) + +### Changed + +- ❗️ `RUSTC_WRAPPER`, `RUSTC_WORKSPACE_WRAPPER`, and variables from the `[env]` + table now also apply to the initial `rustc -vV` invocation Cargo uses for + probing rustc information. + [#13659](https://github.com/rust-lang/cargo/pull/13659) +- ❗️ Turns dependencies like `foo = { optional = true }` from `version="*"` + dependencies with a warning into errors. + This behavior has been considered a bug from the beginning. + [#13775](https://github.com/rust-lang/cargo/pull/13775) +- ❗️ Replace dashes with underscores also if `lib.name` is inferred from `package.name`. + This change aligns to the documented behavior. One caveat is that JSON messages + emitted by Cargo, like via `cargo metadata` or `--message-format=json`, + will start reporting underscore lib names. [#12783](https://github.com/rust-lang/cargo/pull/12783) +- Switch to `gitoxide` for listing files. This improves the performance of + build script and `cargo doc` for computing cache freshness, + as well as fixes some subtle bugs for `cargo publish`. + [#13592](https://github.com/rust-lang/cargo/pull/13592) + [#13696](https://github.com/rust-lang/cargo/pull/13696) + [#13704](https://github.com/rust-lang/cargo/pull/13704) + [#13777](https://github.com/rust-lang/cargo/pull/13777) +- Warn on `-Zlints` being passed and no longer necessary. + [#13632](https://github.com/rust-lang/cargo/pull/13632) +- Warn on unused `workspace.dependencies` keys on virtual workspaces. 
+ [#13664](https://github.com/rust-lang/cargo/pull/13664) +- Emit 1.77 build script syntax error only when msrv is incompatible. + [#13808](https://github.com/rust-lang/cargo/pull/13808) +- Don't warn on `lints.rust.unexpected_cfgs.check-cfg`. + [#13925](https://github.com/rust-lang/cargo/pull/13925) +- cargo-init: don't assign `target.name` in Cargo.toml if the value can be inferred. + [#13606](https://github.com/rust-lang/cargo/pull/13606) +- cargo-package: normalize paths in `Cargo.toml`, including replacing `\` with `/`. + [#13729](https://github.com/rust-lang/cargo/pull/13729) +- cargo-test: recategorize cargo test's `--doc` flag under β€œTarget Selection”. + [#13756](https://github.com/rust-lang/cargo/pull/13756) + +### Fixed + +- Ensure `--config net.git-fetch-with-cli=true` is respected. + [#13992](https://github.com/rust-lang/cargo/pull/13992) + [#13997](https://github.com/rust-lang/cargo/pull/13997) +- Don't panic when resolving an empty alias. + [#13613](https://github.com/rust-lang/cargo/pull/13613) +- When using `--target`, the default debuginfo strip rule also applies. + Note that on Windows MSVC Cargo no longer strips by default. + [#13618](https://github.com/rust-lang/cargo/pull/13618) +- Don't crash on Cargo.toml parse errors that point to multi-byte character + [#13780](https://github.com/rust-lang/cargo/pull/13780) +- Don't emit deprecation warning if one of `.cargo/{config,config.toml}` is + a symlink to the other. + [#13793](https://github.com/rust-lang/cargo/pull/13793) +- Follow HTTP redirections when checking if a repo on GitHub is up-to-date. + [#13718](https://github.com/rust-lang/cargo/pull/13718) +- Bash completion fallback in `nounset` mode. + [#13686](https://github.com/rust-lang/cargo/pull/13686) +- Rerun build script when rustflags changed and `--target` was passed. 
+ [#13560](https://github.com/rust-lang/cargo/pull/13560) +- Fix doc collision for lib/bin with a dash in the inferred name. + [#13640](https://github.com/rust-lang/cargo/pull/13640) +- cargo-add: Maintain sorting of dependency features. + [#13682](https://github.com/rust-lang/cargo/pull/13682) +- cargo-add: Preserve comments when updating simple deps + [#13655](https://github.com/rust-lang/cargo/pull/13655) +- cargo-fix: don't apply same suggestion twice. + [#13728](https://github.com/rust-lang/cargo/pull/13728) +- cargo-package: error when the package specified via `--package` cannot be found + [#13735](https://github.com/rust-lang/cargo/pull/13735) +- credential-provider: trim newlines in tokens from stdin. + [#13770](https://github.com/rust-lang/cargo/pull/13770) ### Nightly only +- πŸ”₯ cargo-update: allows `--precise` to specify a pre-release version of a package + ([RFC 3493](https://github.com/rust-lang/rfcs/blob/master/text/3493-precise-pre-release-cargo-update.md)) + ([docs](https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#precise-pre-release)) + [#13626](https://github.com/rust-lang/cargo/pull/13626) +- RFC 3491: Unused dependencies cleanup + [#13778](https://github.com/rust-lang/cargo/pull/13778) +- `-Zcargo-lints`: Add a basic linting system for Cargo. + This is still under development and not available for general use. + [#13621](https://github.com/rust-lang/cargo/pull/13621) + [#13635](https://github.com/rust-lang/cargo/pull/13635) + [#13797](https://github.com/rust-lang/cargo/pull/13797) + [#13740](https://github.com/rust-lang/cargo/pull/13740) + [#13801](https://github.com/rust-lang/cargo/pull/13801) + [#13852](https://github.com/rust-lang/cargo/pull/13852) + [#13853](https://github.com/rust-lang/cargo/pull/13853) +- πŸ”₯ `edition2024`: Add default Edition2024 to resolver v3 (MSRV-aware resolver). 
+ [#13785](https://github.com/rust-lang/cargo/pull/13785) +- `edition2024`: Remove underscore field support in 2024. + [#13783](https://github.com/rust-lang/cargo/pull/13783) + [#13798](https://github.com/rust-lang/cargo/pull/13798) + [#13800](https://github.com/rust-lang/cargo/pull/13800) + [#13804](https://github.com/rust-lang/cargo/pull/13804) +- `edition2024`: Error on `[project]` in Edition 2024 + [#13747](https://github.com/rust-lang/cargo/pull/13747) +- `-Zmsrv-policy`: Respect '--ignore-rust-version' + [#13738](https://github.com/rust-lang/cargo/pull/13738) +- `-Zmsrv-policy`: Add `--ignore-rust-version` to update/generate-lockfile + [#13741](https://github.com/rust-lang/cargo/pull/13741) + [#13742](https://github.com/rust-lang/cargo/pull/13742) +- `-Zmsrv-policy`: Put MSRV-aware resolver behind a config + [#13769](https://github.com/rust-lang/cargo/pull/13769) +- `-Zmsrv-policy`: Error, rather than panic, on rust-version 'x' + [#13771](https://github.com/rust-lang/cargo/pull/13771) +- `-Zmsrv-policy`: Fallback to 'rustc -V' for MSRV resolving. + [#13743](https://github.com/rust-lang/cargo/pull/13743) +- `-Zmsrv-policy`: Add v3 resolver for MSRV-aware resolving + [#13776](https://github.com/rust-lang/cargo/pull/13776) +- `-Zmsrv-policy`: Don't respect MSRV for non-local installs + [#13790](https://github.com/rust-lang/cargo/pull/13790) +- `-Zmsrv-policy`: Track when MSRV is explicitly set, either way + [#13732](https://github.com/rust-lang/cargo/pull/13732) +- test: don't compress test registry crates. + [#13744](https://github.com/rust-lang/cargo/pull/13744) + +### Documentation + +- Clarify `--locked` ensuring that Cargo uses dependency versions in lockfile + [#13665](https://github.com/rust-lang/cargo/pull/13665) +- Clarify the precedence of `RUSTC_WORKSPACE_WRAPPER` and `RUSTC_WRAPPER`. 
+ [#13648](https://github.com/rust-lang/cargo/pull/13648) +- Clarify only in the root Cargo.toml the `[workspace]` section is allowed. + [#13753](https://github.com/rust-lang/cargo/pull/13753) +- Clarify the differences between virtual and real manifests. + [#13794](https://github.com/rust-lang/cargo/pull/13794) + +### Internal + +- πŸŽ‰ New member crates [`cargo-test-support`](https://crates.io/crates/cargo-test-support) + and [`cargo-test-macro`](https://crates.io/crates/cargo-test-macro)! + They are designed for testing Cargo itself, + so no guarantee on any stability across versions. + The crates.io publish of this crate is the same as other member crates. + They follow Rust's [6-week release process](https://doc.crates.io/contrib/process/release.html#cratesio-publishing). + [#13418](https://github.com/rust-lang/cargo/pull/13418) +- Fix publish script due to crates.io CDN change + [#13614](https://github.com/rust-lang/cargo/pull/13614) +- Push diagnostic complexity on annotate-snippets + [#13619](https://github.com/rust-lang/cargo/pull/13619) +- cargo-package: Simplify getting of published Manifest + [#13666](https://github.com/rust-lang/cargo/pull/13666) +- ci: update macos images to macos-13 + [#13685](https://github.com/rust-lang/cargo/pull/13685) +- manifest: Split out an explicit step to resolve `Cargo.toml` + [#13693](https://github.com/rust-lang/cargo/pull/13693) +- manifest: Decouple target discovery from Target creation + [#13701](https://github.com/rust-lang/cargo/pull/13701) +- manifest: Expose source/spans for VirtualManifests + [#13603](https://github.com/rust-lang/cargo/pull/13603) +- Update dependencies + [#13609](https://github.com/rust-lang/cargo/pull/13609) + [#13674](https://github.com/rust-lang/cargo/pull/13674) + [#13675](https://github.com/rust-lang/cargo/pull/13675) + [#13679](https://github.com/rust-lang/cargo/pull/13679) + 
[#13680](https://github.com/rust-lang/cargo/pull/13680) + [#13692](https://github.com/rust-lang/cargo/pull/13692) + [#13731](https://github.com/rust-lang/cargo/pull/13731) + [#13760](https://github.com/rust-lang/cargo/pull/13760) + [#13950](https://github.com/rust-lang/cargo/pull/13950) + ## Cargo 1.78 (2024-05-02) [7bb7b539...rust-1.78.0](https://github.com/rust-lang/cargo/compare/7bb7b539...rust-1.78.0) @@ -1002,7 +1676,7 @@ [#12332](https://github.com/rust-lang/cargo/pull/12332) - ❗️ `cargo login` no longer accept any token after the `--` syntax. Arguments after `--` are now reserved in the preparation of the new credential provider feature. - This introduces a regression that overlooks the `cargo login -- ` support in preivous versions. + This introduces a regression that overlooks the `cargo login -- ` support in previous versions. [#12499](https://github.com/rust-lang/cargo/pull/12499) - Make Cargo `--help` easier to browse. [#11905](https://github.com/rust-lang/cargo/pull/11905) diff --git a/Cargo.lock b/Cargo.lock index af89b321980..404e5d2c9bd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -31,9 +31,9 @@ dependencies = [ [[package]] name = "allocator-api2" -version = "0.2.16" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" +checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" [[package]] name = "anes" @@ -43,79 +43,80 @@ checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" [[package]] name = "annotate-snippets" -version = "0.11.1" +version = "0.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "238760b2324c811147d933e41f1743e4a8e309c8f3a15f417232e5980e5ce765" +checksum = "24e35ed54e5ea7997c14ed4c70ba043478db1112e98263b3b035907aa197d991" dependencies = [ "anstyle", - "unicode-width", + "unicode-width 0.1.13", ] 
[[package]] name = "anstream" -version = "0.6.13" +version = "0.6.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d96bd03f33fe50a863e394ee9718a706f988b9079b20c3784fb726e7678b62fb" +checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", + "is_terminal_polyfill", "utf8parse", ] [[package]] name = "anstyle" -version = "1.0.6" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8901269c6307e8d93993578286ac0edf7f195079ffff5ebdeea6a59ffb7e36bc" +checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1" [[package]] name = "anstyle-lossy" -version = "1.1.0" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9a0444767dbd4aea9355cb47a370eb184dbfe918875e127eff52cb9d1638181" +checksum = "f45c79b3b9413932fc255f2c19ca0d48eaab72c4ea1913bafaebf289cbc099f2" dependencies = [ "anstyle", ] [[package]] name = "anstyle-parse" -version = "0.2.3" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" +checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.0.2" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" +checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a" dependencies = [ "windows-sys 0.52.0", ] [[package]] name = "anstyle-svg" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b6ddad447b448d6d5db36b31cbd3ff27c7af071619501998eeceab01968287a" +checksum = 
"bbbf0bf947d663010f0b4132f28ca08da9151f3b9035fa7578a38de521c1d1aa" dependencies = [ "anstream", "anstyle", "anstyle-lossy", "html-escape", - "unicode-width", + "unicode-width 0.1.13", ] [[package]] name = "anstyle-wincon" -version = "3.0.2" +version = "3.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" +checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8" dependencies = [ "anstyle", "windows-sys 0.52.0", @@ -123,9 +124,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.81" +version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0952808a6c2afd1aa8947271f3a60f1a6763c7b912d210184c5149b5cf147247" +checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" [[package]] name = "arc-swap" @@ -133,11 +134,23 @@ version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" +[[package]] +name = "arrayref" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545" + +[[package]] +name = "arrayvec" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" + [[package]] name = "autocfg" -version = "1.2.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1fdabc7756949593fe60f30ec81974b613357de856987752631dea1e3394c80" +checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" [[package]] name = "base16ct" @@ -147,15 +160,9 @@ checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" [[package]] name = "base64" -version = "0.21.7" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" - -[[package]] -name = "base64" -version = "0.22.0" +version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9475866fec1451be56a3c2400fd081ff546538961565ccb5b7142cbd22bc7a51" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "base64ct" @@ -199,9 +206,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" [[package]] name = "bitmaps" @@ -212,6 +219,19 @@ dependencies = [ "typenum", ] +[[package]] +name = "blake3" +version = "1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d08263faac5cde2a4d52b513dadb80846023aade56fcd8fc99ba73ba8050e92" +dependencies = [ + "arrayref", + "arrayvec", + "cc", + "cfg-if", + "constant_time_eq", +] + [[package]] name = "block-buffer" version = "0.10.4" @@ -228,15 +248,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "05efc5cfd9110c8416e471df0e96702d58690178e206e61b7173706673c93706" dependencies = [ "memchr", - "regex-automata 0.4.6", + "regex-automata 0.4.7", "serde", ] [[package]] name = "bumpalo" -version = "3.15.4" +version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ff69b9dd49fd426c69a0db9fc04dd934cdb6645ff000864d98f7e2af8830eaa" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" [[package]] name = "bytes" @@ -271,24 +291,25 @@ dependencies = [ [[package]] name = "cargo" -version = "0.80.0" +version = "0.84.0" dependencies = [ 
"annotate-snippets", "anstream", "anstyle", "anyhow", - "base64 0.22.0", + "base64", + "blake3", "bytesize", "cargo-credential", "cargo-credential-libsecret", "cargo-credential-macos-keychain", "cargo-credential-wincred", - "cargo-platform 0.1.8", - "cargo-test-macro", + "cargo-platform 0.1.9", "cargo-test-support", "cargo-util", "cargo-util-schemas", "clap", + "clap_complete", "color-print", "crates-io", "curl", @@ -307,7 +328,7 @@ dependencies = [ "ignore", "im-rc", "indexmap", - "itertools 0.12.1", + "itertools 0.13.0", "jobserver", "lazycell", "libc", @@ -335,6 +356,7 @@ dependencies = [ "supports-unicode", "tar", "tempfile", + "thiserror", "time", "toml", "toml_edit", @@ -342,15 +364,15 @@ dependencies = [ "tracing-chrome", "tracing-subscriber", "unicase", - "unicode-width", + "unicode-width 0.2.0", "url", "walkdir", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "cargo-credential" -version = "0.4.5" +version = "0.4.7" dependencies = [ "anyhow", "libc", @@ -359,12 +381,12 @@ dependencies = [ "snapbox", "thiserror", "time", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "cargo-credential-1password" -version = "0.4.4" +version = "0.4.5" dependencies = [ "cargo-credential", "serde", @@ -373,7 +395,7 @@ dependencies = [ [[package]] name = "cargo-credential-libsecret" -version = "0.4.5" +version = "0.4.9" dependencies = [ "anyhow", "cargo-credential", @@ -382,7 +404,7 @@ dependencies = [ [[package]] name = "cargo-credential-macos-keychain" -version = "0.4.5" +version = "0.4.9" dependencies = [ "cargo-credential", "security-framework", @@ -390,35 +412,35 @@ dependencies = [ [[package]] name = "cargo-credential-wincred" -version = "0.4.5" +version = "0.4.9" dependencies = [ "cargo-credential", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "cargo-platform" version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"24b1f0365a6c6bb4020cd05806fd0d33c44d38046b8bd7f0e40814b9763cabfc" dependencies = [ "serde", ] [[package]] name = "cargo-platform" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24b1f0365a6c6bb4020cd05806fd0d33c44d38046b8bd7f0e40814b9763cabfc" +version = "0.1.9" dependencies = [ "serde", ] [[package]] name = "cargo-test-macro" -version = "0.2.0" +version = "0.3.2" [[package]] name = "cargo-test-support" -version = "0.2.0" +version = "0.5.0" dependencies = [ "anstream", "anstyle", @@ -430,8 +452,9 @@ dependencies = [ "flate2", "git2", "glob", - "itertools 0.12.1", + "itertools 0.13.0", "pasetors", + "regex", "serde", "serde_json", "snapbox", @@ -440,15 +463,15 @@ dependencies = [ "toml", "url", "walkdir", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "cargo-util" -version = "0.2.12" +version = "0.2.16" dependencies = [ "anyhow", - "core-foundation", + "core-foundation 0.10.0", "filetime", "hex", "ignore", @@ -461,12 +484,12 @@ dependencies = [ "tempfile", "tracing", "walkdir", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "cargo-util-schemas" -version = "0.3.1" +version = "0.7.0" dependencies = [ "semver", "serde", @@ -486,7 +509,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d886547e41f740c616ae73108f6eb70afe6d940c7bc697cb30f13daec073037" dependencies = [ "camino", - "cargo-platform 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", + "cargo-platform 0.1.8", "semver", "serde", "serde_json", @@ -501,12 +524,13 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.0.90" +version = "1.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cd6604a82acf3039f1144f54b8eb34e91ffba622051189e71b781822d5ee1f5" +checksum = "9540e661f81799159abee814118cc139a2004b3a3aa3ea37724a1b66530b90e0" dependencies = [ "jobserver", 
"libc", + "shlex", ] [[package]] @@ -544,18 +568,18 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.4" +version = "4.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bc066a67923782aa8515dbaea16946c5bcc5addbd668bb80af688e53e548a0" +checksum = "b0956a43b323ac1afaffc053ed5c4b7c1f1800bacd1683c353aabbb752515dd3" dependencies = [ "clap_builder", ] [[package]] name = "clap_builder" -version = "4.5.2" +version = "4.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4" +checksum = "4d72166dd41634086d5803a47eb71ae740e61d84709c36f3c34110173db3961b" dependencies = [ "anstream", "anstyle", @@ -564,11 +588,23 @@ dependencies = [ "terminal_size", ] +[[package]] +name = "clap_complete" +version = "4.5.32" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74a01f4f9ee6c066d42a1c8dedf0dcddad16c72a8981a309d6398de3a75b0c39" +dependencies = [ + "clap", + "clap_lex", + "is_executable", + "shlex", +] + [[package]] name = "clap_lex" -version = "0.7.0" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" +checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" [[package]] name = "clru" @@ -578,30 +614,30 @@ checksum = "b8191fa7302e03607ff0e237d4246cc043ff5b3cb9409d995172ba3bea16b807" [[package]] name = "color-print" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a858372ff14bab9b1b30ea504f2a4bc534582aee3e42ba2d41d2a7baba63d5d" +checksum = "1ee543c60ff3888934877a5671f45494dd27ed4ba25c6670b9a7576b7ed7a8c0" dependencies = [ "color-print-proc-macro", ] [[package]] name = "color-print-proc-macro" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "57e37866456a721d0a404439a1adae37a31be4e0055590d053dfe6981e05003f" +checksum = "77ff1a80c5f3cb1ca7c06ffdd71b6a6dd6d8f896c42141fbd43f50ed28dcdb93" dependencies = [ "nom", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.72", ] [[package]] name = "colorchoice" -version = "1.0.0" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" +checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" [[package]] name = "const-oid" @@ -609,6 +645,12 @@ version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" +[[package]] +name = "constant_time_eq" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7144d30dcf0fafbce74250a3963025d8d52177934239851c917d29f1df280c2" + [[package]] name = "content_inspector" version = "0.2.4" @@ -628,11 +670,21 @@ dependencies = [ "libc", ] +[[package]] +name = "core-foundation" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b55271e5c8c478ad3f38ad24ef34923091e0548492a266d19b3c0b4d82574c63" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "core-foundation-sys" -version = "0.8.6" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "cpufeatures" @@ -645,7 +697,7 @@ dependencies = [ [[package]] name = "crates-io" -version = "0.40.2" +version = "0.40.6" dependencies = [ "curl", "percent-encoding", @@ -785,9 +837,9 @@ dependencies = [ [[package]] name = "curl-sys" -version = "0.4.72+curl-8.6.0" +version = "0.4.74+curl-8.9.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "29cbdc8314c447d11e8fd156dcdd031d9e02a7a976163e396b548c03153bc9ea" +checksum = "8af10b986114528fcdc4b63b6f5f021b7057618411046a4de2ba0f0149a097bf" dependencies = [ "cc", "libc", @@ -916,18 +968,19 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "erased-serde" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b73807008a3c7f171cc40312f37d95ef0396e048b5848d775f54b1a4dd4a0d3" +checksum = "24e2389d65ab4fab27dc2a5de7b191e1f6617d1f1c8855c0dc569c94a4cbb18d" dependencies = [ "serde", + "typeid", ] [[package]] name = "errno" -version = "0.3.8" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" dependencies = [ "libc", "windows-sys 0.52.0", @@ -935,9 +988,9 @@ dependencies = [ [[package]] name = "escargot" -version = "0.5.10" +version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f474c6844cbd04e783d0f25757583db4f491770ca618bedf2fb01815fc79939" +checksum = "650eb5f6eeda986377996e9ed570cbc20cc16d30440696f82f129c863e4e3e83" dependencies = [ "log", "once_cell", @@ -965,9 +1018,9 @@ checksum = "a2a2b11eda1d40935b26cf18f6833c526845ae8c41e58d09af6adeb6f0269183" [[package]] name = "fastrand" -version = "2.0.2" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "658bd65b1cf4c852a3cc96f18a8ce7b5640f6b703f905c7d74532294c2a63984" +checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" [[package]] name = "ff" @@ -999,9 +1052,9 @@ dependencies = [ [[package]] name = "flate2" -version = "1.0.28" +version = "1.0.30" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" +checksum = "5f54427cfd1c7829e2a139fcefea601bf088ebca651d2bf53ebc600eac295dae" dependencies = [ "crc32fast", "libz-sys", @@ -1051,9 +1104,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.12" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", "js-sys", @@ -1064,11 +1117,11 @@ dependencies = [ [[package]] name = "git2" -version = "0.18.3" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "232e6a7bfe35766bf715e55a88b39a700596c0ccfd88cd3680b4cdb40d66ef70" +checksum = "b903b73e45dc0c6c596f2d37eccece7c1c8bb6e4407b001096387c63d0d93724" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "libc", "libgit2-sys", "log", @@ -1079,9 +1132,9 @@ dependencies = [ [[package]] name = "git2-curl" -version = "0.19.0" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78e26b61608c573ffd26fc79061a823aa5147449a1afe1f61679a21e2031f7c3" +checksum = "68ff14527a1c242320039b138376f8e0786697a1b7b172bc44f6efda3ab9079f" dependencies = [ "curl", "git2", @@ -1091,9 +1144,9 @@ dependencies = [ [[package]] name = "gix" -version = "0.62.0" +version = "0.64.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5631c64fb4cd48eee767bf98a3cbc5c9318ef3bb71074d4c099a2371510282b6" +checksum = "d78414d29fcc82329080166077e0f7689f4016551fdb334d787c3d040fe2634f" dependencies = [ "gix-actor", "gix-attributes", @@ -1138,7 +1191,6 @@ dependencies = [ "gix-validate", "gix-worktree", "once_cell", - "parking_lot", "prodash", "smallvec", "thiserror", @@ -1146,9 +1198,9 @@ dependencies = [ [[package]] name = "gix-actor" -version = "0.31.1" +version = "0.31.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "45c3a3bde455ad2ee8ba8a195745241ce0b770a8a26faae59fcf409d01b28c46" +checksum = "a0e454357e34b833cc3a00b6efbbd3dd4d18b24b9fb0c023876ec2645e8aa3f2" dependencies = [ "bstr", "gix-date", @@ -1160,9 +1212,9 @@ dependencies = [ [[package]] name = "gix-attributes" -version = "0.22.2" +version = "0.22.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eefb48f42eac136a4a0023f49a54ec31be1c7a9589ed762c45dcb9b953f7ecc8" +checksum = "e37ce99c7e81288c28b703641b6d5d119aacc45c1a6b247156e6249afa486257" dependencies = [ "bstr", "gix-glob", @@ -1195,9 +1247,9 @@ dependencies = [ [[package]] name = "gix-command" -version = "0.3.6" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f90009020dc4b3de47beed28e1334706e0a330ddd17f5cfeb097df3b15a54b77" +checksum = "0d76867867da891cbe32021ad454e8cae90242f6afb06762e4dd0d357afd1d7b" dependencies = [ "bstr", "gix-path", @@ -1207,9 +1259,9 @@ dependencies = [ [[package]] name = "gix-commitgraph" -version = "0.24.2" +version = "0.24.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7b102311085da4af18823413b5176d7c500fb2272eaf391cfa8635d8bcb12c4" +checksum = "133b06f67f565836ec0c473e2116a60fb74f80b6435e21d88013ac0e3c60fc78" dependencies = [ "bstr", "gix-chunk", @@ -1221,9 +1273,9 @@ dependencies = [ [[package]] name = "gix-config" -version = "0.36.1" +version = "0.38.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7580e05996e893347ad04e1eaceb92e1c0e6a3ffe517171af99bf6b6df0ca6e5" +checksum = "28f53fd03d1bf09ebcc2c8654f08969439c4556e644ca925f27cf033bc43e658" dependencies = [ "bstr", "gix-config-value", @@ -1242,11 +1294,11 @@ dependencies = [ [[package]] name = "gix-config-value" -version = "0.14.6" +version = "0.14.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"fbd06203b1a9b33a78c88252a625031b094d9e1b647260070c25b09910c0a804" +checksum = "b328997d74dd15dc71b2773b162cb4af9a25c424105e4876e6d0686ab41c383e" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "bstr", "gix-path", "libc", @@ -1255,9 +1307,9 @@ dependencies = [ [[package]] name = "gix-credentials" -version = "0.24.2" +version = "0.24.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c70146183bd3c7119329a3c7392d1aa0e0adbe48d727f4df31828fe6d8fdaa1" +checksum = "198588f532e4d1202e04e6c3f50e4d7c060dffc66801c6f53cc246f1d234739e" dependencies = [ "bstr", "gix-command", @@ -1272,9 +1324,9 @@ dependencies = [ [[package]] name = "gix-date" -version = "0.8.5" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "180b130a4a41870edfbd36ce4169c7090bca70e195da783dea088dd973daa59c" +checksum = "9eed6931f21491ee0aeb922751bd7ec97b4b2fe8fbfedcb678e2a2dce5f3b8c0" dependencies = [ "bstr", "itoa 1.0.11", @@ -1284,9 +1336,9 @@ dependencies = [ [[package]] name = "gix-diff" -version = "0.43.0" +version = "0.44.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5fbc24115b957346cd23fb0f47d830eb799c46c89cdcf2f5acc9bf2938c2d01" +checksum = "1996d5c8a305b59709467d80617c9fde48d9d75fd1f4179ea970912630886c9d" dependencies = [ "bstr", "gix-hash", @@ -1296,9 +1348,9 @@ dependencies = [ [[package]] name = "gix-dir" -version = "0.4.1" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6fcd56ffa1133f35525af890226ad0d3b2e607b4490360c94b1869e278eba3" +checksum = "0c975679aa00dd2d757bfd3ddb232e8a188c0094c3306400575a0813858b1365" dependencies = [ "bstr", "gix-discover", @@ -1316,9 +1368,9 @@ dependencies = [ [[package]] name = "gix-discover" -version = "0.31.0" +version = "0.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64bab49087ed3710caf77e473dc0efc54ca33d8ccc6441359725f121211482b1" 
+checksum = "67662731cec3cb31ba3ed2463809493f76d8e5d6c6d245de8b0560438c13450e" dependencies = [ "bstr", "dunce", @@ -1332,9 +1384,9 @@ dependencies = [ [[package]] name = "gix-features" -version = "0.38.1" +version = "0.38.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db4254037d20a247a0367aa79333750146a369719f0c6617fec4f5752cc62b37" +checksum = "ac7045ac9fe5f9c727f38799d002a7ed3583cd777e3322a7c4b43e3cf437dc69" dependencies = [ "bytes", "crc32fast", @@ -1354,9 +1406,9 @@ dependencies = [ [[package]] name = "gix-filter" -version = "0.11.1" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c0d1f01af62bfd2fb3dd291acc2b29d4ab3e96ad52a679174626508ce98ef12" +checksum = "e6547738da28275f4dff4e9f3a0f28509f53f94dd6bd822733c91cb306bca61a" dependencies = [ "bstr", "encoding_rs", @@ -1375,21 +1427,22 @@ dependencies = [ [[package]] name = "gix-fs" -version = "0.10.2" +version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2184c40e7910529677831c8b481acf788ffd92427ed21fad65b6aa637e631b8" +checksum = "6adf99c27cdf17b1c4d77680c917e0d94d8783d4e1c73d3be0d1d63107163d7a" dependencies = [ + "fastrand", "gix-features", "gix-utils", ] [[package]] name = "gix-glob" -version = "0.16.2" +version = "0.16.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "682bdc43cb3c00dbedfcc366de2a849b582efd8d886215dbad2ea662ec156bb5" +checksum = "fa7df15afa265cc8abe92813cd354d522f1ac06b29ec6dfa163ad320575cb447" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "bstr", "gix-features", "gix-path", @@ -1418,9 +1471,9 @@ dependencies = [ [[package]] name = "gix-ignore" -version = "0.11.2" +version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "640dbeb4f5829f9fc14d31f654a34a0350e43a24e32d551ad130d99bf01f63f1" +checksum = "5e6afb8f98e314d4e1adc822449389ada863c174b5707cedd327d67b84dba527" 
dependencies = [ "bstr", "gix-glob", @@ -1431,11 +1484,11 @@ dependencies = [ [[package]] name = "gix-index" -version = "0.32.1" +version = "0.33.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "881ab3b1fa57f497601a5add8289e72a7ae09471fc0b9bbe483b628ae8e418a1" +checksum = "9a9a44eb55bd84bb48f8a44980e951968ced21e171b22d115d1cdcef82a7d73f" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "bstr", "filetime", "fnv", @@ -1447,6 +1500,7 @@ dependencies = [ "gix-object", "gix-traverse", "gix-utils", + "gix-validate", "hashbrown", "itoa 1.0.11", "libc", @@ -1458,9 +1512,9 @@ dependencies = [ [[package]] name = "gix-lock" -version = "13.1.1" +version = "14.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7c359f81f01b8352063319bcb39789b7ea0887b406406381106e38c4a34d049" +checksum = "e3bc7fe297f1f4614774989c00ec8b1add59571dc9b024b4c00acb7dedd4e19d" dependencies = [ "gix-tempfile", "gix-utils", @@ -1469,22 +1523,22 @@ dependencies = [ [[package]] name = "gix-macros" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dff438f14e67e7713ab9332f5fd18c8f20eb7eb249494f6c2bf170522224032" +checksum = "999ce923619f88194171a67fb3e6d613653b8d4d6078b529b15a765da0edcc17" dependencies = [ "proc-macro2", "quote", - "syn 2.0.57", + "syn 2.0.72", ] [[package]] name = "gix-negotiate" -version = "0.13.0" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54ba98f8c8c06870dfc167d192ca38a38261867b836cb89ac80bc9176dba975e" +checksum = "9ec879fb6307bb63519ba89be0024c6f61b4b9d61f1a91fd2ce572d89fe9c224" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "gix-commitgraph", "gix-date", "gix-hash", @@ -1496,9 +1550,9 @@ dependencies = [ [[package]] name = "gix-object" -version = "0.42.1" +version = "0.42.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3d4f8efae72030df1c4a81d02dbe2348e748d9b9a11e108ed6efbd846326e051" +checksum = "25da2f46b4e7c2fa7b413ce4dffb87f69eaf89c2057e386491f4c55cadbfe386" dependencies = [ "bstr", "gix-actor", @@ -1515,9 +1569,9 @@ dependencies = [ [[package]] name = "gix-odb" -version = "0.60.0" +version = "0.61.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8bbb43d2fefdc4701ffdf9224844d05b136ae1b9a73c2f90710c8dd27a93503" +checksum = "20d384fe541d93d8a3bb7d5d5ef210780d6df4f50c4e684ccba32665a5e3bc9b" dependencies = [ "arc-swap", "gix-date", @@ -1535,9 +1589,9 @@ dependencies = [ [[package]] name = "gix-pack" -version = "0.50.0" +version = "0.51.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b58bad27c7677fa6b587aab3a1aca0b6c97373bd371a0a4290677c838c9bcaf1" +checksum = "3e0594491fffe55df94ba1c111a6566b7f56b3f8d2e1efc750e77d572f5f5229" dependencies = [ "clru", "gix-chunk", @@ -1579,9 +1633,9 @@ dependencies = [ [[package]] name = "gix-path" -version = "0.10.7" +version = "0.10.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23623cf0f475691a6d943f898c4d0b89f5c1a2a64d0f92bce0e0322ee6528783" +checksum = "ebfc4febd088abdcbc9f1246896e57e37b7a34f6909840045a1767c6dafac7af" dependencies = [ "bstr", "gix-trace", @@ -1592,11 +1646,11 @@ dependencies = [ [[package]] name = "gix-pathspec" -version = "0.7.4" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea9f934a111e0efdf93ae06e3648427e60e783099fbebd6a53a7a2ffb10a1e65" +checksum = "d307d1b8f84dc8386c4aa20ce0cf09242033840e15469a3ecba92f10cfb5c046" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "bstr", "gix-attributes", "gix-config-value", @@ -1607,9 +1661,9 @@ dependencies = [ [[package]] name = "gix-prompt" -version = "0.8.4" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f5325eb17ce7b5e5d25dec5c2315d642a09d55b9888b3bf46b7d72e1621a55d8" +checksum = "7e0595d2be4b6d6a71a099e989bdd610882b882da35fb8503d91d6f81aa0936f" dependencies = [ "gix-command", "gix-config-value", @@ -1620,9 +1674,9 @@ dependencies = [ [[package]] name = "gix-protocol" -version = "0.45.0" +version = "0.45.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aed3bb6179835a3250403baa9d7022579e559fc45f2efc416d9de1a14b5acf11" +checksum = "bad8da8e89f24177bd77947092199bb13dcc318bbd73530ba8a05e6d6adaaa9d" dependencies = [ "bstr", "gix-credentials", @@ -1649,12 +1703,11 @@ dependencies = [ [[package]] name = "gix-ref" -version = "0.43.0" +version = "0.45.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd4aba68b925101cb45d6df328979af0681364579db889098a0de75b36c77b65" +checksum = "636e96a0a5562715153fee098c217110c33a6f8218f08f4687ff99afde159bb5" dependencies = [ "gix-actor", - "gix-date", "gix-features", "gix-fs", "gix-hash", @@ -1671,9 +1724,9 @@ dependencies = [ [[package]] name = "gix-refspec" -version = "0.23.0" +version = "0.23.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dde848865834a54fe4d9b4573f15d0e9a68eaf3d061b42d3ed52b4b8acf880b2" +checksum = "6868f8cd2e62555d1f7c78b784bece43ace40dd2a462daf3b588d5416e603f37" dependencies = [ "bstr", "gix-hash", @@ -1685,9 +1738,9 @@ dependencies = [ [[package]] name = "gix-revision" -version = "0.27.0" +version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e34196e1969bd5d36e2fbc4467d893999132219d503e23474a8ad2b221cb1e8" +checksum = "01b13e43c2118c4b0537ddac7d0821ae0dfa90b7b8dbf20c711e153fb749adce" dependencies = [ "bstr", "gix-date", @@ -1701,9 +1754,9 @@ dependencies = [ [[package]] name = "gix-revwalk" -version = "0.13.0" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e0a7d393ae814eeaae41a333c0ff684b243121cc61ccdc5bbe9897094588047d" +checksum = "1b030ccaab71af141f537e0225f19b9e74f25fefdba0372246b844491cab43e0" dependencies = [ "gix-commitgraph", "gix-date", @@ -1716,11 +1769,11 @@ dependencies = [ [[package]] name = "gix-sec" -version = "0.10.6" +version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fddc27984a643b20dd03e97790555804f98cf07404e0e552c0ad8133266a79a1" +checksum = "1547d26fa5693a7f34f05b4a3b59a90890972922172653bcb891ab3f09f436df" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "gix-path", "libc", "windows-sys 0.52.0", @@ -1728,9 +1781,9 @@ dependencies = [ [[package]] name = "gix-submodule" -version = "0.10.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb7ea05666362472fecd44c1fc35fe48a5b9b841b431cc4f85b95e6f20c23ec" +checksum = "0f2e0f69aa00805e39d39ec80472a7e9da20ed5d73318b27925a2cc198e854fd" dependencies = [ "bstr", "gix-config", @@ -1743,9 +1796,9 @@ dependencies = [ [[package]] name = "gix-tempfile" -version = "13.1.1" +version = "14.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a761d76594f4443b675e85928e4902dec333273836bd386906f01e7e346a0d11" +checksum = "d3b0e276cd08eb2a22e9f286a4f13a222a01be2defafa8621367515375644b99" dependencies = [ "gix-fs", "libc", @@ -1756,17 +1809,17 @@ dependencies = [ [[package]] name = "gix-trace" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f924267408915fddcd558e3f37295cc7d6a3e50f8bd8b606cee0808c3915157e" +checksum = "6cae0e8661c3ff92688ce1c8b8058b3efb312aba9492bbe93661a21705ab431b" [[package]] name = "gix-transport" -version = "0.42.0" +version = "0.42.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d2f783b2fe86bf2a8cf1f3b8669d65b01ab4932f32cc0101d3893e1b16a3bd6" +checksum = 
"27c02b83763ffe95bcc27ce5821b2b7f843315a009c06f1cd59c9b66c508c058" dependencies = [ - "base64 0.21.7", + "base64", "bstr", "curl", "gix-command", @@ -1781,11 +1834,11 @@ dependencies = [ [[package]] name = "gix-traverse" -version = "0.39.0" +version = "0.39.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4029ec209b0cc480d209da3837a42c63801dd8548f09c1f4502c60accb62aeb" +checksum = "e499a18c511e71cf4a20413b743b9f5bcf64b3d9e81e9c3c6cd399eae55a8840" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "gix-commitgraph", "gix-date", "gix-hash", @@ -1798,9 +1851,9 @@ dependencies = [ [[package]] name = "gix-url" -version = "0.27.3" +version = "0.27.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0db829ebdca6180fbe32be7aed393591df6db4a72dbbc0b8369162390954d1cf" +checksum = "e2eb9b35bba92ea8f0b5ab406fad3cf6b87f7929aa677ff10aa042c6da621156" dependencies = [ "bstr", "gix-features", @@ -1823,9 +1876,9 @@ dependencies = [ [[package]] name = "gix-validate" -version = "0.8.4" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e39fc6e06044985eac19dd34d474909e517307582e462b2eb4c8fa51b6241545" +checksum = "82c27dd34a49b1addf193c92070bcbf3beaf6e10f16a78544de6372e146a0acf" dependencies = [ "bstr", "thiserror", @@ -1833,9 +1886,9 @@ dependencies = [ [[package]] name = "gix-worktree" -version = "0.33.1" +version = "0.34.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f06ca5dd164678914fc9280ba9d1ffeb66499ccc16ab1278c513828beee88401" +checksum = "26f7326ebe0b9172220694ea69d344c536009a9b98fb0f9de092c440f3efe7a6" dependencies = [ "bstr", "gix-attributes", @@ -1847,6 +1900,7 @@ dependencies = [ "gix-index", "gix-object", "gix-path", + "gix-validate", ] [[package]] @@ -1864,8 +1918,8 @@ dependencies = [ "aho-corasick", "bstr", "log", - "regex-automata 0.4.6", - "regex-syntax 0.8.3", + "regex-automata 0.4.7", + "regex-syntax 
0.8.4", ] [[package]] @@ -1906,9 +1960,9 @@ dependencies = [ [[package]] name = "hashbrown" -version = "0.14.3" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ "ahash", "allocator-api2", @@ -1966,7 +2020,7 @@ dependencies = [ name = "home" version = "0.5.11" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -2013,7 +2067,7 @@ dependencies = [ "globset", "log", "memchr", - "regex-automata 0.4.6", + "regex-automata 0.4.7", "same-file", "walkdir", "winapi-util", @@ -2035,9 +2089,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.2.6" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" +checksum = "de3fc2e30ba82dd1b3911c8de1ffc143c74a914a14e99514d7637e3099df5ea0" dependencies = [ "equivalent", "hashbrown", @@ -2054,6 +2108,21 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "is_executable" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ba3d8548b8b04dafdf2f4cc6f5e379db766d0a6d9aac233ad4c9a92ea892233" +dependencies = [ + "winapi", +] + +[[package]] +name = "is_terminal_polyfill" +version = "1.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" + [[package]] name = "itertools" version = "0.10.5" @@ -2065,9 +2134,9 @@ dependencies = [ [[package]] name = "itertools" -version = "0.12.1" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba291022dbbd398a455acf126c1e341954079855bc60dfdda641363bd6922569" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" 
dependencies = [ "either", ] @@ -2086,9 +2155,9 @@ checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "jobserver" -version = "0.1.28" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab46a6e9526ddef3ae7f787c06f0f2600639ba80ea3eade3d8e670a2230f51d6" +checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" dependencies = [ "libc", ] @@ -2113,9 +2182,9 @@ dependencies = [ [[package]] name = "lazy_static" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" [[package]] name = "lazycell" @@ -2131,9 +2200,9 @@ checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" [[package]] name = "libc" -version = "0.2.153" +version = "0.2.158" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" +checksum = "d8adc4bb1803a324070e64a98ae98f38934d91957a99cfb3a43dcbc01bc56439" [[package]] name = "libdbus-sys" @@ -2147,9 +2216,9 @@ dependencies = [ [[package]] name = "libgit2-sys" -version = "0.16.2+1.7.2" +version = "0.17.0+1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee4126d8b4ee5c9d9ea891dd875cfdc1e9d0950437179104b183d7d8a74d24e8" +checksum = "10472326a8a6477c3c20a64547b0059e4b0d086869eee31e6d7da728a8eb7224" dependencies = [ "cc", "libc", @@ -2161,12 +2230,12 @@ dependencies = [ [[package]] name = "libloading" -version = "0.8.3" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c2a198fb6b0eada2a8df47933734e6d35d350665a33a3593d7164fa52c75c19" +checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" dependencies = [ "cfg-if", - 
"windows-targets 0.52.4", + "windows-targets 0.52.6", ] [[package]] @@ -2177,9 +2246,9 @@ checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" [[package]] name = "libnghttp2-sys" -version = "0.1.9+1.58.0" +version = "0.1.10+1.61.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b57e858af2798e167e709b9d969325b6d8e9d50232fcbc494d7d54f976854a64" +checksum = "959c25552127d2e1fa72f0e52548ec04fc386e827ba71a7bd01db46a447dc135" dependencies = [ "cc", "libc", @@ -2187,9 +2256,9 @@ dependencies = [ [[package]] name = "libsqlite3-sys" -version = "0.28.0" +version = "0.30.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c10584274047cb335c23d3e61bcef8e323adae7c5c8c760540f73610177fc3f" +checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149" dependencies = [ "cc", "pkg-config", @@ -2212,9 +2281,9 @@ dependencies = [ [[package]] name = "libz-sys" -version = "1.1.16" +version = "1.1.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e143b5e666b2695d28f6bca6497720813f699c9602dd7f5cac91008b8ada7f9" +checksum = "c15da26e5af7e25c90b37a2d75cdbf940cf4a55316de9d84c679c9b8bfabf82e" dependencies = [ "cc", "libc", @@ -2224,9 +2293,9 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.4.13" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" [[package]] name = "lock_api" @@ -2240,9 +2309,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.21" +version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" [[package]] name = "matchers" @@ 
-2261,7 +2330,7 @@ checksum = "5cf92c10c7e361d6b99666ec1c6f9805b0bea2c3bd8c78dc6fe98ac5bd78db11" dependencies = [ "proc-macro2", "quote", - "syn 2.0.57", + "syn 2.0.72", ] [[package]] @@ -2279,9 +2348,9 @@ dependencies = [ [[package]] name = "memchr" -version = "2.7.2" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "memmap2" @@ -2359,9 +2428,9 @@ checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" [[package]] name = "num-traits" -version = "0.2.18" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da0df0e5185db44f69b44f26786fe401b6c293d1907744beaa7fa62b2e5a517a" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", "libm", @@ -2390,9 +2459,9 @@ checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" [[package]] name = "opener" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9901cb49d7fc923b256db329ee26ffed69130bf05d74b9efdd1875c92d6af01" +checksum = "f8df34be653210fbe9ffaff41d3b92721c56ce82dfee58ee684f9afb5e3a90c0" dependencies = [ "bstr", "dbus", @@ -2406,7 +2475,7 @@ version = "0.10.57" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bac25ee399abb46215765b1cb35bc0212377e58a061560d8b29b024fd0430e7c" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "cfg-if", "foreign-types", "libc", @@ -2423,7 +2492,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.57", + "syn 2.0.72", ] [[package]] @@ -2434,9 +2503,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = 
"openssl-src" -version = "111.28.1+1.1.1w" +version = "111.28.2+1.1.1w" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bf7e82ffd6d3d6e6524216a0bfd85509f68b5b28354e8e7800057e44cefa9b4" +checksum = "bb1830e20a48a975ca898ca8c1d036a36c3c6c5cb7dabc1c216706587857920f" dependencies = [ "cc", ] @@ -2547,9 +2616,9 @@ dependencies = [ [[package]] name = "pasetors" -version = "0.6.8" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b36d47c66f2230dd1b7143d9afb2b4891879020210eddf2ccb624e529b96dba" +checksum = "b719f62f9b77cfadf01500984ab26c7bda71b3f1e497e9dbb055ff466dbe2a86" dependencies = [ "ct-codecs", "ed25519-compact", @@ -2618,7 +2687,7 @@ dependencies = [ "pest_meta", "proc-macro2", "quote", - "syn 2.0.57", + "syn 2.0.72", ] [[package]] @@ -2705,9 +2774,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.79" +version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e" +checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" dependencies = [ "unicode-ident", ] @@ -2723,19 +2792,19 @@ dependencies = [ [[package]] name = "proptest" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31b476131c3c86cb68032fdc5cb6d5a1045e3e42d96b69fa599fd77701e1f5bf" +checksum = "b4c2511913b88df1637da85cc8d96ec8e43a3f8bb8ccb71ee1ac240d6f3df58d" dependencies = [ "bit-set", "bit-vec", - "bitflags 2.5.0", + "bitflags 2.6.0", "lazy_static", "num-traits", "rand", "rand_chacha", "rand_xorshift", - "regex-syntax 0.8.3", + "regex-syntax 0.8.4", "rusty-fork", "tempfile", "unarray", @@ -2743,11 +2812,11 @@ dependencies = [ [[package]] name = "pulldown-cmark" -version = "0.10.2" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5f0530d13d87d1f549b66a3e8d0c688952abe5994e204ed62615baaf25dc029c" +checksum = "8746739f11d39ce5ad5c2520a9b75285310dbfe78c541ccf832d38615765aec0" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "memchr", "pulldown-cmark-escape", "unicase", @@ -2755,9 +2824,9 @@ dependencies = [ [[package]] name = "pulldown-cmark-escape" -version = "0.10.0" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5d8f9aa0e3cbcfaf8bf00300004ee3b72f74770f9cbac93f6928771f613276b" +checksum = "007d8adb5ddab6f8e3f491ac63566a7d5002cc7ed73901f72057943fa71ae1ae" [[package]] name = "quick-error" @@ -2767,9 +2836,9 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quote" -version = "1.0.35" +version = "1.0.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" +checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" dependencies = [ "proc-macro2", ] @@ -2853,14 +2922,14 @@ dependencies = [ [[package]] name = "regex" -version = "1.10.4" +version = "1.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c" +checksum = "b91213439dad192326a0d7c6ee3955910425f441d7038e0d6933b0aec5c4517f" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.6", - "regex-syntax 0.8.3", + "regex-automata 0.4.7", + "regex-syntax 0.8.4", ] [[package]] @@ -2874,13 +2943,13 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.6" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea" +checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.8.3", + "regex-syntax 0.8.4", ] [[package]] 
@@ -2891,15 +2960,16 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adad44e29e4c806119491a7f06f03de4d1af22c3a680dd47f1e6e179439d1f56" +checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" [[package]] name = "resolver-tests" version = "0.0.0" dependencies = [ "cargo", + "cargo-platform 0.1.9", "cargo-util", "cargo-util-schemas", "proptest", @@ -2918,11 +2988,11 @@ dependencies = [ [[package]] name = "rusqlite" -version = "0.31.0" +version = "0.32.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b838eba278d213a8beaf485bd313fd580ca4505a00d5871caeb1457c55322cae" +checksum = "7753b721174eb8ff87a9a0e799e2d7bc3749323e773db92e0984debb00019d6e" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "fallible-iterator", "fallible-streaming-iterator", "hashlink", @@ -2938,7 +3008,7 @@ checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustfix" -version = "0.8.3" +version = "0.8.7" dependencies = [ "anyhow", "proptest", @@ -2953,11 +3023,11 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.32" +version = "0.38.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "65e04861e65f21776e67888bfbea442b3642beaa0138fdb1dd7a84a52dffdb89" +checksum = "8acb788b847c24f28525660c4d7758620a7210875711f79e7f663cc152726811" dependencies = [ - "bitflags 2.5.0", + "bitflags 2.6.0", "errno", "libc", "linux-raw-sys", @@ -2978,9 +3048,9 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.17" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" [[package]] name = 
"same-file" @@ -3022,12 +3092,12 @@ dependencies = [ [[package]] name = "security-framework" -version = "2.10.0" +version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "770452e37cad93e0a50d5abc3990d2bc351c36d0328f86cefec2f2fb206eaef6" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ - "bitflags 1.3.2", - "core-foundation", + "bitflags 2.6.0", + "core-foundation 0.9.4", "core-foundation-sys", "libc", "security-framework-sys", @@ -3035,9 +3105,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.10.0" +version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41f3cc463c0ef97e11c3461a9d3787412d30e8e7eb907c79180c4a57bf7c04ef" +checksum = "75da29fe9b9b08fe9d6b22b5b4bcbc75d8db3aa31e639aa56bb62e9d46bfceaf" dependencies = [ "core-foundation-sys", "libc", @@ -3045,9 +3115,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.22" +version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" +checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" dependencies = [ "serde", ] @@ -3061,21 +3131,22 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.197" +version = "1.0.204" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" +checksum = "bc76f558e0cbb2a839d37354c575f1dc3fdc6546b5be373ba43d95f231bf7c12" dependencies = [ "serde_derive", ] [[package]] name = "serde-untagged" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a160535368dfc353348e7eaa299156bd508c60c45a9249725f5f6d370d82a66" +checksum = "2676ba99bd82f75cae5cbd2c8eda6fa0b8760f18978ea840e980dd5567b5c5b6" dependencies = [ "erased-serde", "serde", + 
"typeid", ] [[package]] @@ -3090,13 +3161,13 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.197" +version = "1.0.204" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" +checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222" dependencies = [ "proc-macro2", "quote", - "syn 2.0.57", + "syn 2.0.72", ] [[package]] @@ -3110,20 +3181,21 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.115" +version = "1.0.121" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12dc5c46daa8e9fdf4f5e71b6cf9a53f2487da0e86e55808e2d35539666497dd" +checksum = "4ab380d7d9f22ef3f21ad3e6c1ebe8e4fc7a2000ccba2e4d71fc96f15b2cb609" dependencies = [ "itoa 1.0.11", + "memchr", "ryu", "serde", ] [[package]] name = "serde_spanned" -version = "0.6.5" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb3622f419d1296904700073ea6cc23ad690adbd66f13ea683df73298736f0c1" +checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d" dependencies = [ "serde", ] @@ -3177,6 +3249,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde" +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + [[package]] name = "signature" version = "2.2.0" @@ -3189,9 +3267,9 @@ dependencies = [ [[package]] name = "similar" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa42c91313f1d05da9b26f267f931cf178d4aba455b4c4622dd7355eb80c6640" +checksum = "1de1d4f81173b03af4c0cbed3c898f6bff5b870e4a7f5d6f4057d62a7a4b686e" [[package]] name = "sized-chunks" @@ 
-3211,9 +3289,9 @@ checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] name = "snapbox" -version = "0.5.9" +version = "0.6.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ac441e1ecf678f68423d47f376d53fabce1afba92c8f68e31508eb27df8562a" +checksum = "7ba434818a8a9b1b106404288d6bd75a94348aae8fc9a518b211b609a36a54bc" dependencies = [ "anstream", "anstyle", @@ -3223,6 +3301,8 @@ dependencies = [ "escargot", "filetime", "normalize-line-endings", + "regex", + "serde", "serde_json", "similar", "snapbox-macros", @@ -3232,9 +3312,9 @@ dependencies = [ [[package]] name = "snapbox-macros" -version = "0.3.8" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1c4b838b05d15ab22754068cb73500b2f3b07bf09d310e15b27f88160f1de40" +checksum = "16569f53ca23a41bb6f62e0a5084aa1661f4814a67fa33696a79073e03a664af" dependencies = [ "anstream", ] @@ -3267,9 +3347,9 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "strsim" -version = "0.11.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ee073c9e4cd00e28217186dbe12796d692868f432bf2e97ee73bed0c56dfa01" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "subtle" @@ -3302,9 +3382,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.57" +version = "2.0.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11a6ae1e52eb25aab8f3fb9fca13be982a373b8f1157ca14b897a825ba4a2d35" +checksum = "dc4b9b9bf2add8093d3f2c0204471e951b2285580335de42f9d2534f3ae7a8af" dependencies = [ "proc-macro2", "quote", @@ -3325,9 +3405,9 @@ dependencies = [ [[package]] name = "tar" -version = "0.4.40" +version = "0.4.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b16afcea1f22891c49a00c751c7b63b2233284064f11a200fc624137c51e2ddb" +checksum = "4ff6c40d3aedb5e06b57c6f669ad17ab063dd1e63d977c6a88e7f4dfa4f04020" dependencies = [ "filetime", "libc", @@ -3357,22 +3437,22 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.58" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03468839009160513471e86a034bb2c5c0e4baae3b43f79ffc55c4a5427b3297" +checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.58" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7" +checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" dependencies = [ "proc-macro2", "quote", - "syn 2.0.57", + "syn 2.0.72", ] [[package]] @@ -3387,9 +3467,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.34" +version = "0.3.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c8248b6521bb14bc45b4067159b9b6ad792e2d6d754d6c41fb50e29fefe38749" +checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" dependencies = [ "deranged", "itoa 1.0.11", @@ -3410,9 +3490,9 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.17" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ba3a3ef41e6672a2f0f001392bb5dcd3ff0a9992d618ca761a11c3121547774" +checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" dependencies = [ "num-conv", "time-core", @@ -3430,9 +3510,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.6.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" dependencies = [ "tinyvec_macros", ] @@ -3445,9 +3525,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "toml" -version = "0.8.12" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9dd1545e8208b4a5af1aa9bbd0b4cf7e9ea08fabc5d0a5c67fcaafa17433aa3" +checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" dependencies = [ "serde", "serde_spanned", @@ -3457,18 +3537,18 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.6.5" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1" +checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" dependencies = [ "serde", ] [[package]] name = "toml_edit" -version = "0.22.11" +version = "0.22.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb686a972ccef8537b39eead3968b0e8616cb5040dbb9bba93007c8e07c9215f" +checksum = "583c44c02ad26b0c3f3066fe629275e50627026c51ac2e595cca4c230ce1ce1d" dependencies = [ "indexmap", "serde", @@ -3496,7 +3576,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.57", + "syn 2.0.72", ] [[package]] @@ -3549,6 +3629,12 @@ dependencies = [ "tracing-log", ] +[[package]] +name = "typeid" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "059d83cc991e7a42fc37bd50941885db0888e34209f8cfd9aab07ddec03bc9cf" + [[package]] name = "typenum" version = "1.17.0" @@ -3605,9 +3691,15 @@ dependencies = [ [[package]] name = "unicode-width" -version = "0.1.11" +version = "0.1.13" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" + +[[package]] +name = "unicode-width" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" +checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" [[package]] name = "unicode-xid" @@ -3617,9 +3709,9 @@ checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" [[package]] name = "url" -version = "2.5.0" +version = "2.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" +checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" dependencies = [ "form_urlencoded", "idna", @@ -3634,9 +3726,9 @@ checksum = "86bd8d4e895da8537e5315b8254664e6b769c4ff3db18321b297a1e7004392e3" [[package]] name = "utf8parse" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "valuable" @@ -3789,7 +3881,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.57", + "syn 2.0.72", "wasm-bindgen-shared", ] @@ -3811,7 +3903,7 @@ checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.57", + "syn 2.0.72", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -3850,11 +3942,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.6" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" 
+checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" dependencies = [ - "winapi", + "windows-sys 0.52.0", ] [[package]] @@ -3878,7 +3970,16 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.52.4", + "windows-targets 0.52.6", +] + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", ] [[package]] @@ -3898,17 +3999,18 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ - "windows_aarch64_gnullvm 0.52.4", - "windows_aarch64_msvc 0.52.4", - "windows_i686_gnu 0.52.4", - "windows_i686_msvc 0.52.4", - "windows_x86_64_gnu 0.52.4", - "windows_x86_64_gnullvm 0.52.4", - "windows_x86_64_msvc 0.52.4", + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", ] [[package]] @@ -3919,9 +4021,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" @@ -3931,9 
+4033,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" @@ -3943,9 +4045,15 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.52.4" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" @@ -3955,9 +4063,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" @@ -3967,9 +4075,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" @@ 
-3979,9 +4087,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" @@ -3991,15 +4099,15 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.6.5" +version = "0.6.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dffa400e67ed5a4dd237983829e66475f0a4a26938c4b04c21baede6262215b8" +checksum = "68a9bda4691f099d435ad181000724da8e5899daa10713c2d432552b9ccd3a6f" dependencies = [ "memchr", ] @@ -4022,6 +4130,16 @@ dependencies = [ "tracing-subscriber", ] +[[package]] +name = "xtask-lint-docs" +version = "0.1.0" +dependencies = [ + "anyhow", + "cargo", + "clap", + "itertools 0.13.0", +] + [[package]] name = "xtask-stale-label" version = "0.0.0" @@ -4031,22 +4149,22 @@ dependencies = [ [[package]] name = "zerocopy" -version = "0.7.32" +version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74d4d3961e53fa4c9a25a8637fc2bfaf2595b3d3ae34875568a5cf64787716be" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.7.32" +version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.57", + "syn 2.0.72", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index d55c2bdf7c3..c797f11c739 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -11,42 +11,44 @@ exclude = [ ] [workspace.package] -rust-version = "1.75" # MSRV:3 +rust-version = "1.78" # MSRV:3 edition = "2021" license = "MIT OR Apache-2.0" homepage = "https://github.com/rust-lang/cargo" repository = "https://github.com/rust-lang/cargo" [workspace.dependencies] -annotate-snippets = "0.11.1" -anstream = "0.6.13" -anstyle = "1.0.6" -anyhow = "1.0.81" -base64 = "0.22.0" +annotate-snippets = "0.11.4" +anstream = "0.6.15" +anstyle = "1.0.8" +anyhow = "1.0.86" +base64 = "0.22.1" +blake3 = "1.5.2" bytesize = "1.3" cargo = { path = "" } cargo-credential = { version = "0.4.2", path = "credential/cargo-credential" } -cargo-credential-libsecret = { version = "0.4.2", path = "credential/cargo-credential-libsecret" } -cargo-credential-macos-keychain = { version = "0.4.2", path = "credential/cargo-credential-macos-keychain" } -cargo-credential-wincred = { version = "0.4.2", path = "credential/cargo-credential-wincred" } +cargo-credential-libsecret = { version = "0.4.7", path = "credential/cargo-credential-libsecret" } +cargo-credential-macos-keychain = { version = "0.4.7", path = "credential/cargo-credential-macos-keychain" } +cargo-credential-wincred = { version = "0.4.7", path = "credential/cargo-credential-wincred" } cargo-platform = { path = "crates/cargo-platform", version = "0.1.5" } -cargo-test-macro = { version = "0.2.0", path = "crates/cargo-test-macro" } -cargo-test-support = { version = "0.2.0", path = "crates/cargo-test-support" } -cargo-util = { version = "0.2.9", path = "crates/cargo-util" } -cargo-util-schemas = { version = "0.3.0", path = "crates/cargo-util-schemas" } +cargo-test-macro = { 
version = "0.3.0", path = "crates/cargo-test-macro" } +cargo-test-support = { version = "0.5.0", path = "crates/cargo-test-support" } +cargo-util = { version = "0.2.14", path = "crates/cargo-util" } +cargo-util-schemas = { version = "0.7.0", path = "crates/cargo-util-schemas" } cargo_metadata = "0.18.1" -clap = "4.5.4" -color-print = "0.3.5" -core-foundation = { version = "0.9.4", features = ["mac_os_10_7_support"] } -crates-io = { version = "0.40.0", path = "crates/crates-io" } +clap = "4.5.18" +clap_complete = { version = "4.5.32", features = ["unstable-dynamic"] } +color-print = "0.3.6" +core-foundation = { version = "0.10.0", features = ["mac_os_10_7_support"] } +crates-io = { version = "0.40.4", path = "crates/crates-io" } criterion = { version = "0.5.1", features = ["html_reports"] } curl = "0.4.46" -curl-sys = "0.4.72" +curl-sys = "0.4.73" filetime = "0.2.23" -flate2 = { version = "1.0.28", default-features = false, features = ["zlib"] } -git2 = "0.18.3" -git2-curl = "0.19.0" -gix = { version = "0.62.0", default-features = false, features = ["blocking-http-transport-curl", "progress-tree", "revision", "parallel", "dirwalk"] } +flate2 = { version = "1.0.30", default-features = false, features = ["zlib"] } +git2 = "0.19.0" +git2-curl = "0.20.0" +gix = { version = "0.64.0", default-features = false, features = ["blocking-http-transport-curl", "progress-tree", "parallel", "dirwalk"] } glob = "0.3.1" handlebars = { version = "5.1.2", features = ["dir_source"] } hex = "0.4.3" @@ -56,58 +58,59 @@ http-auth = { version = "0.1.9", default-features = false } humantime = "2.1.0" ignore = "0.4.22" im-rc = "15.1.0" -indexmap = "2" -itertools = "0.12.1" -jobserver = "0.1.28" +indexmap = "2.2.6" +itertools = "0.13.0" +jobserver = "0.1.32" lazycell = "1.3.0" -libc = "0.2.153" -libgit2-sys = "0.16.2" -libloading = "0.8.3" -memchr = "2.7.2" +libc = "0.2.155" +libgit2-sys = "0.17.0" +libloading = "0.8.5" +memchr = "2.7.4" miow = "0.6.0" -opener = "0.7.0" -openssl = "0.10.57" 
+opener = "0.7.1" +openssl = "=0.10.57" # See rust-lang/cargo#13546 and openssl/openssl#23376 for pinning openssl-sys = "=0.9.92" # See rust-lang/cargo#13546 and openssl/openssl#23376 for pinning os_info = { version = "3.8.2", default-features = false } -pasetors = { version = "0.6.8", features = ["v3", "paserk", "std", "serde"] } -pathdiff = "0.2" -percent-encoding = "2.3" +pasetors = { version = "0.7.0", features = ["v3", "paserk", "std", "serde"] } +pathdiff = "0.2.1" +percent-encoding = "2.3.1" pkg-config = "0.3.30" -proptest = "1.4.0" -pulldown-cmark = { version = "0.10.2", default-features = false, features = ["html"] } +proptest = "1.5.0" +pulldown-cmark = { version = "0.11.0", default-features = false, features = ["html"] } rand = "0.8.5" -regex = "1.10.4" -rusqlite = { version = "0.31.0", features = ["bundled"] } +regex = "1.10.5" +rusqlite = { version = "0.32.0", features = ["bundled"] } rustfix = { version = "0.8.2", path = "crates/rustfix" } same-file = "1.0.6" -security-framework = "2.10.0" -semver = { version = "1.0.22", features = ["serde"] } -serde = "1.0.197" -serde-untagged = "0.1.5" +security-framework = "2.11.1" +semver = { version = "1.0.23", features = ["serde"] } +serde = "1.0.204" +serde-untagged = "0.1.6" serde-value = "0.7.0" serde_ignored = "0.1.10" -serde_json = "1.0.115" +serde_json = "1.0.120" sha1 = "0.10.6" sha2 = "0.10.8" shell-escape = "0.1.5" +similar = "2.6.0" supports-hyperlinks = "3.0.0" -snapbox = { version = "0.5.9", features = ["diff", "path", "term-svg"] } -tar = { version = "0.4.40", default-features = false } +snapbox = { version = "0.6.18", features = ["diff", "dir", "term-svg", "regex", "json"] } +tar = { version = "0.4.42", default-features = false } tempfile = "3.10.1" -thiserror = "1.0.58" -time = { version = "0.3", features = ["parsing", "formatting", "serde"] } -toml = "0.8.12" -toml_edit = { version = "0.22.11", features = ["serde"] } -tracing = "0.1.40" # be compatible with rustc_log: 
https://github.com/rust-lang/rust/blob/e51e98dde6a/compiler/rustc_log/Cargo.toml#L9 +thiserror = "1.0.63" +time = { version = "0.3.36", features = ["parsing", "formatting", "serde"] } +toml = "0.8.19" +toml_edit = { version = "0.22.20", features = ["serde"] } +tracing = { version = "0.1.40", default-features = false, features = ["std"] } # be compatible with rustc_log: https://github.com/rust-lang/rust/blob/e51e98dde6a/compiler/rustc_log/Cargo.toml#L9 tracing-chrome = "0.7.2" tracing-subscriber = { version = "0.3.18", features = ["env-filter"] } unicase = "2.7.0" -unicode-width = "0.1.11" +unicode-width = "0.2.0" unicode-xid = "0.2.4" -url = "2.5.0" +url = "2.5.2" varisat = "0.2.2" walkdir = "2.5.0" -windows-sys = "0.52" +windows-sys = "0.59" [workspace.lints.rust] rust_2018_idioms = "warn" # TODO: could this be removed? @@ -125,10 +128,10 @@ self_named_module_files = "warn" [package] name = "cargo" -version = "0.80.0" +version = "0.84.0" edition.workspace = true license.workspace = true -rust-version = "1.77" # MSRV:1 +rust-version = "1.81" # MSRV:1 homepage = "https://doc.rust-lang.org/cargo/index.html" repository.workspace = true documentation = "https://docs.rs/cargo" @@ -146,12 +149,14 @@ anstream.workspace = true anstyle.workspace = true anyhow.workspace = true base64.workspace = true +blake3.workspace = true bytesize.workspace = true cargo-credential.workspace = true cargo-platform.workspace = true cargo-util-schemas.workspace = true cargo-util.workspace = true clap = { workspace = true, features = ["wrap_help"] } +clap_complete.workspace = true color-print.workspace = true crates-io.workspace = true curl = { workspace = true, features = ["http2"] } @@ -194,10 +199,11 @@ shell-escape.workspace = true supports-hyperlinks.workspace = true tar.workspace = true tempfile.workspace = true +thiserror.workspace = true time.workspace = true toml.workspace = true toml_edit.workspace = true -tracing.workspace = true +tracing = { workspace = true, features = 
["attributes"] } tracing-subscriber.workspace = true unicase.workspace = true unicode-width.workspace = true @@ -237,8 +243,8 @@ features = [ [dev-dependencies] annotate-snippets = { workspace = true, features = ["testing-colors"] } -cargo-test-macro.workspace = true cargo-test-support.workspace = true +gix = { workspace = true, features = ["revision"] } same-file.workspace = true snapbox.workspace = true diff --git a/README.md b/README.md index 1d806b97864..6afe150ea9e 100644 --- a/README.md +++ b/README.md @@ -9,17 +9,19 @@ Cargo downloads your Rust project’s dependencies and compiles your project. [The Cargo Book]: https://doc.rust-lang.org/cargo/ [Cargo Contributor Guide]: https://rust-lang.github.io/cargo/contrib/ +> The Cargo binary distributed with Rust is maintained by the Cargo +> team for use by the wider ecosystem. +> For all other uses of this crate (as a binary or library) this is maintained +> by the Cargo team, primarily for use by Cargo and not intended for external +> use (except as a transitive dependency). This crate may make major changes to +> its APIs. + ## Code Status [![CI](https://github.com/rust-lang/cargo/actions/workflows/main.yml/badge.svg?branch=auto-cargo)](https://github.com/rust-lang/cargo/actions/workflows/main.yml) Code documentation: -## Installing Cargo - -Cargo is distributed by default with Rust, so if you've got `rustc` installed -locally you probably also have `cargo` installed locally. - ## Compiling from Source ### Requirements diff --git a/benches/benchsuite/README.md b/benches/benchsuite/README.md new file mode 100644 index 00000000000..ca004d0c62a --- /dev/null +++ b/benches/benchsuite/README.md @@ -0,0 +1,3 @@ +> This crate is maintained by the Cargo team, primarily for use by Cargo +> and not intended for external use. This +> crate may make major changes to its APIs or be deprecated without warning.
diff --git a/benches/benchsuite/benches/resolve.rs b/benches/benchsuite/benches/resolve.rs index 89d0212e378..d798dd6d62b 100644 --- a/benches/benchsuite/benches/resolve.rs +++ b/benches/benchsuite/benches/resolve.rs @@ -33,6 +33,7 @@ fn do_resolve<'gctx>(gctx: &'gctx GlobalContext, ws_root: &Path) -> ResolveInfo< let force_all_targets = ForceAllTargets::No; // Do an initial run to download anything necessary so that it does // not confuse criterion's warmup. + let dry_run = false; let ws_resolve = cargo::ops::resolve_ws_with_opts( &ws, &mut target_data, @@ -41,6 +42,7 @@ fn do_resolve<'gctx>(gctx: &'gctx GlobalContext, ws_root: &Path) -> ResolveInfo< &specs, has_dev_units, force_all_targets, + dry_run, ) .unwrap(); ResolveInfo { @@ -71,6 +73,7 @@ fn resolve_ws(c: &mut Criterion) { // iterator once, and we don't want to call `do_resolve` in every // "step", since that would just be some useless work. let mut lazy_info = None; + let dry_run = false; group.bench_function(&ws_name, |b| { let ResolveInfo { ws, @@ -91,6 +94,7 @@ fn resolve_ws(c: &mut Criterion) { specs, *has_dev_units, *force_all_targets, + dry_run, ) .unwrap(); }) diff --git a/benches/benchsuite/src/lib.rs b/benches/benchsuite/src/lib.rs index 327c04c4f05..2d85584d513 100644 --- a/benches/benchsuite/src/lib.rs +++ b/benches/benchsuite/src/lib.rs @@ -1,3 +1,7 @@ +//! > This crate is maintained by the Cargo team, primarily for use by Cargo +//! > and not intended for external use. This +//! > crate may make major changes to its APIs or be deprecated without warning. 
+ #![allow(clippy::disallowed_methods)] use cargo::GlobalContext; diff --git a/build.rs b/build.rs index 60fda40e323..9b1e2662eaf 100644 --- a/build.rs +++ b/build.rs @@ -47,16 +47,15 @@ fn compress_man() { encoder.finish().unwrap(); } -fn commit_info() { - if !Path::new(".git").exists() { - return; - } +struct CommitInfo { + hash: String, + short_hash: String, + date: String, +} - // Var set by bootstrap whenever omit-git-hash is enabled in rust-lang/rust's config.toml. - println!("cargo:rerun-if-env-changed=CFG_OMIT_GIT_HASH"); - #[allow(clippy::disallowed_methods)] - if std::env::var_os("CFG_OMIT_GIT_HASH").is_some() { - return; +fn commit_info_from_git() -> Option<CommitInfo> { + if !Path::new(".git").exists() { + return None; } let output = match Command::new("git") @@ -68,14 +67,66 @@ .output() { Ok(output) if output.status.success() => output, - _ => return, + _ => return None, }; + let stdout = String::from_utf8(output.stdout).unwrap(); - let mut parts = stdout.split_whitespace(); - let mut next = || parts.next().unwrap(); - println!("cargo:rustc-env=CARGO_COMMIT_HASH={}", next()); - println!("cargo:rustc-env=CARGO_COMMIT_SHORT_HASH={}", next()); - println!("cargo:rustc-env=CARGO_COMMIT_DATE={}", next()) + let mut parts = stdout.split_whitespace().map(|s| s.to_string()); + + Some(CommitInfo { + hash: parts.next()?, + short_hash: parts.next()?, + date: parts.next()?, + }) +} + +// The rustc source tarball is meant to contain all the source code to build an exact copy of the +// toolchain, but it doesn't include the git repository itself. It wouldn't thus be possible to +// populate the version information with the commit hash and the commit date. +// +// To work around this, the rustc build process obtains the git information when creating the +// source tarball and writes it to the `git-commit-info` file.
The build process actually creates +// at least *two* of those files, one for Rust as a whole (in the root of the tarball) and one +// specifically for Cargo (in src/tools/cargo). This function loads that file. +// +// The file is a newline-separated list of full commit hash, short commit hash, and commit date. +fn commit_info_from_rustc_source_tarball() -> Option { + let path = Path::new("git-commit-info"); + if !path.exists() { + return None; + } + + // Dependency tracking is a nice to have for this (git doesn't do it), so if the path is not + // valid UTF-8 just avoid doing it rather than erroring out. + if let Some(utf8) = path.to_str() { + println!("cargo:rerun-if-changed={utf8}"); + } + + let content = std::fs::read_to_string(&path).ok()?; + let mut parts = content.split('\n').map(|s| s.to_string()); + Some(CommitInfo { + hash: parts.next()?, + short_hash: parts.next()?, + date: parts.next()?, + }) +} + +fn commit_info() { + // Var set by bootstrap whenever omit-git-hash is enabled in rust-lang/rust's config.toml. + println!("cargo:rerun-if-env-changed=CFG_OMIT_GIT_HASH"); + // ALLOWED: Accessing environment during build time shouldn't be prohibited. 
+ #[allow(clippy::disallowed_methods)] + if std::env::var_os("CFG_OMIT_GIT_HASH").is_some() { + return; + } + + let Some(git) = commit_info_from_git().or_else(commit_info_from_rustc_source_tarball) else { + return; + }; + + println!("cargo:rustc-env=CARGO_COMMIT_HASH={}", git.hash); + println!("cargo:rustc-env=CARGO_COMMIT_SHORT_HASH={}", git.short_hash); + println!("cargo:rustc-env=CARGO_COMMIT_DATE={}", git.date); } #[allow(clippy::disallowed_methods)] diff --git a/crates/cargo-platform/Cargo.toml b/crates/cargo-platform/Cargo.toml index a42e2829a61..02dd6da8232 100644 --- a/crates/cargo-platform/Cargo.toml +++ b/crates/cargo-platform/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "cargo-platform" -version = "0.1.8" +version = "0.1.9" edition.workspace = true license.workspace = true rust-version.workspace = true diff --git a/crates/cargo-platform/README.md b/crates/cargo-platform/README.md new file mode 100644 index 00000000000..844bc081f70 --- /dev/null +++ b/crates/cargo-platform/README.md @@ -0,0 +1,2 @@ +> This crate is maintained by the Cargo team for use by the wider +> ecosystem. This crate follows semver compatibility for its APIs. diff --git a/crates/cargo-platform/src/lib.rs b/crates/cargo-platform/src/lib.rs index 7911e484e57..71e9140bae9 100644 --- a/crates/cargo-platform/src/lib.rs +++ b/crates/cargo-platform/src/lib.rs @@ -6,6 +6,9 @@ //! //! See `examples/matches.rs` for an example of how to match against a `Platform`. //! +//! > This crate is maintained by the Cargo team for use by the wider +//! > ecosystem. This crate follows semver compatibility for its APIs. +//! //! 
[`Platform`]: enum.Platform.html use std::fmt; diff --git a/crates/cargo-test-macro/Cargo.toml b/crates/cargo-test-macro/Cargo.toml index 68e621f0103..bb083da6718 100644 --- a/crates/cargo-test-macro/Cargo.toml +++ b/crates/cargo-test-macro/Cargo.toml @@ -1,8 +1,8 @@ [package] name = "cargo-test-macro" -version = "0.2.0" +version = "0.3.2" edition.workspace = true -rust-version = "1.77" # MSRV:1 +rust-version = "1.81" # MSRV:1 license.workspace = true homepage.workspace = true repository.workspace = true diff --git a/crates/cargo-test-macro/README.md b/crates/cargo-test-macro/README.md index 311d5ed6d66..ca004d0c62a 100644 --- a/crates/cargo-test-macro/README.md +++ b/crates/cargo-test-macro/README.md @@ -1,5 +1,3 @@ -WARNING: You might not want to use this outside of Cargo. - -* This is designed for testing Cargo itself. Use at your own risk. -* No guarantee on any stability across versions. -* No feature request would be accepted unless proved useful for testing Cargo. +> This crate is maintained by the Cargo team, primarily for use by Cargo +> and not intended for external use. This +> crate may make major changes to its APIs or be deprecated without warning. diff --git a/crates/cargo-test-macro/src/lib.rs b/crates/cargo-test-macro/src/lib.rs index 9d4a5fbaa99..93bf0ec08c6 100644 --- a/crates/cargo-test-macro/src/lib.rs +++ b/crates/cargo-test-macro/src/lib.rs @@ -3,17 +3,58 @@ //! This is meant to be consumed alongside `cargo-test-support`. See //! for a guide on writing tests. //! -//! WARNING: You might not want to use this outside of Cargo. -//! -//! * This is designed for testing Cargo itself. Use at your own risk. -//! * No guarantee on any stability across versions. -//! * No feature request would be accepted unless proved useful for testing Cargo. +//! > This crate is maintained by the Cargo team, primarily for use by Cargo +//! > and not intended for external use. This +//! > crate may make major changes to its APIs or be deprecated without warning. 
use proc_macro::*; use std::path::Path; use std::process::Command; use std::sync::Once; +/// Replacement for `#[test]` +/// +/// The `#[cargo_test]` attribute extends `#[test]` with some setup before starting the test. +/// It will create a filesystem "sandbox" under the "cargo integration test" directory for each test, such as `/path/to/cargo/target/tmp/cit/t123/`. +/// The sandbox will contain a `home` directory that will be used instead of your normal home directory. +/// +/// The `#[cargo_test]` attribute takes several options that will affect how the test is generated. +/// They are listed in parentheses separated with commas, such as: +/// +/// ```rust,ignore +/// #[cargo_test(nightly, reason = "-Zfoo is unstable")] +/// ``` +/// +/// The options it supports are: +/// +/// * `>=1.64` --- This indicates that the test will only run with the given version of `rustc` or newer. +/// This can be used when a new `rustc` feature has been stabilized that the test depends on. +/// If this is specified, a `reason` is required to explain why it is being checked. +/// * `nightly` --- This will cause the test to be ignored if not running on the nightly toolchain. +/// This is useful for tests that use unstable options in `rustc` or `rustdoc`. +/// These tests are run in Cargo's CI, but are disabled in rust-lang/rust's CI due to the difficulty of updating both repos simultaneously. +/// A `reason` field is required to explain why it is nightly-only. +/// * `requires_` --- This indicates a command that is required to be installed to be run. +/// For example, `requires_rustfmt` means the test will only run if the executable `rustfmt` is installed. +/// These tests are *always* run on CI. +/// This is mainly used to avoid requiring contributors from having every dependency installed. +/// * `build_std_real` --- This is a "real" `-Zbuild-std` test (in the `build_std` integration test). 
+/// This only runs on nightly, and only if the environment variable `CARGO_RUN_BUILD_STD_TESTS` is set (these tests on run on Linux). +/// * `build_std_mock` --- This is a "mock" `-Zbuild-std` test (which uses a mock standard library). +/// This only runs on nightly, and is disabled for windows-gnu. +/// * `public_network_test` --- This tests contacts the public internet. +/// These tests are disabled unless the `CARGO_PUBLIC_NETWORK_TESTS` environment variable is set. +/// Use of this should be *extremely rare*, please avoid using it if possible. +/// The hosts it contacts should have a relatively high confidence that they are reliable and stable (such as github.com), especially in CI. +/// The tests should be carefully considered for developer security and privacy as well. +/// * `container_test` --- This indicates that it is a test that uses Docker. +/// These tests are disabled unless the `CARGO_CONTAINER_TESTS` environment variable is set. +/// This requires that you have Docker installed. +/// The SSH tests also assume that you have OpenSSH installed. +/// These should work on Linux, macOS, and Windows where possible. +/// Unfortunately these tests are not run in CI for macOS or Windows (no Docker on macOS, and Windows does not support Linux images). +/// See [`cargo-test-support::containers`](https://doc.rust-lang.org/nightly/nightly-rustc/cargo_test_support/containers) for more on writing these tests. +/// * `ignore_windows="reason"` --- Indicates that the test should be ignored on windows for the given reason. #[proc_macro_attribute] pub fn cargo_test(attr: TokenStream, item: TokenStream) -> TokenStream { // Ideally these options would be embedded in the test itself. 
However, I diff --git a/crates/cargo-test-support/Cargo.toml b/crates/cargo-test-support/Cargo.toml index fd64a66efc7..c8420eb134c 100644 --- a/crates/cargo-test-support/Cargo.toml +++ b/crates/cargo-test-support/Cargo.toml @@ -1,16 +1,13 @@ [package] name = "cargo-test-support" -version = "0.2.0" +version = "0.5.0" edition.workspace = true -rust-version = "1.77" # MSRV:1 +rust-version = "1.81" # MSRV:1 license.workspace = true homepage.workspace = true repository.workspace = true description = "Testing framework for Cargo's testsuite." -[lib] -doctest = false - [dependencies] anstream.workspace = true anstyle.workspace = true @@ -24,6 +21,7 @@ git2.workspace = true glob.workspace = true itertools.workspace = true pasetors.workspace = true +regex.workspace = true serde = { workspace = true, features = ["derive"] } serde_json.workspace = true snapbox.workspace = true diff --git a/crates/cargo-test-support/README.md b/crates/cargo-test-support/README.md index 311d5ed6d66..ca004d0c62a 100644 --- a/crates/cargo-test-support/README.md +++ b/crates/cargo-test-support/README.md @@ -1,5 +1,3 @@ -WARNING: You might not want to use this outside of Cargo. - -* This is designed for testing Cargo itself. Use at your own risk. -* No guarantee on any stability across versions. -* No feature request would be accepted unless proved useful for testing Cargo. +> This crate is maintained by the Cargo team, primarily for use by Cargo +> and not intended for external use. This +> crate may make major changes to its APIs or be deprecated without warning. 
diff --git a/crates/cargo-test-support/build.rs b/crates/cargo-test-support/build.rs index 8854f461aa6..930666f140e 100644 --- a/crates/cargo-test-support/build.rs +++ b/crates/cargo-test-support/build.rs @@ -1,6 +1,7 @@ #![allow(clippy::disallowed_methods)] fn main() { + println!("cargo:rustc-check-cfg=cfg(emulate_second_only_system)"); println!( "cargo:rustc-env=NATIVE_ARCH={}", std::env::var("TARGET").unwrap() diff --git a/crates/cargo-test-support/containers/apache/Dockerfile b/crates/cargo-test-support/containers/apache/Dockerfile index 87260241018..926f4c35365 100644 --- a/crates/cargo-test-support/containers/apache/Dockerfile +++ b/crates/cargo-test-support/containers/apache/Dockerfile @@ -6,13 +6,13 @@ COPY bar /repos/bar WORKDIR /repos/bar RUN git config --global user.email "testuser@example.com" &&\ git config --global user.name "Test User" &&\ + git config --system --add safe.directory '*' &&\ git init -b master . &&\ git add Cargo.toml src &&\ git commit -m "Initial commit" &&\ - mv .git ../bar.git &&\ - cd ../bar.git &&\ - git config --bool core.bare true &&\ - rm -rf ../bar + cd .. &&\ + git clone --bare bar bar.git &&\ + rm -rf bar WORKDIR / EXPOSE 443 diff --git a/crates/cargo-test-support/containers/sshd/Dockerfile b/crates/cargo-test-support/containers/sshd/Dockerfile index 2c055bbc200..de491fea85e 100644 --- a/crates/cargo-test-support/containers/sshd/Dockerfile +++ b/crates/cargo-test-support/containers/sshd/Dockerfile @@ -1,4 +1,4 @@ -FROM alpine:3.19 +FROM alpine:3.20 RUN apk add --no-cache openssh git RUN ssh-keygen -A @@ -17,10 +17,9 @@ RUN git config --global user.email "testuser@example.com" &&\ git init -b master . &&\ git add Cargo.toml src &&\ git commit -m "Initial commit" &&\ - mv .git ../bar.git &&\ - cd ../bar.git &&\ - git config --bool core.bare true &&\ - rm -rf ../bar + cd .. 
&&\ + git clone --bare bar bar.git &&\ + rm -rf bar WORKDIR / USER root diff --git a/crates/cargo-test-support/src/compare.rs b/crates/cargo-test-support/src/compare.rs index 62559747d47..5b5d4b6ce85 100644 --- a/crates/cargo-test-support/src/compare.rs +++ b/crates/cargo-test-support/src/compare.rs @@ -1,6 +1,11 @@ //! Routines for comparing and diffing output. //! -//! # Patterns +//! # Deprecated comparisons +//! +//! Cargo's tests are in transition from internal-only pattern and normalization routines used in +//! asserts like [`crate::Execs::with_stdout_contains`] to [`assert_e2e`] and [`assert_ui`]. +//! +//! ## Patterns //! //! Many of these functions support special markup to assist with comparing //! text that may vary or is otherwise uninteresting for the test at hand. The @@ -22,7 +27,7 @@ //! can use this to avoid duplicating the `with_stderr` call like: //! `if cfg!(target_env = "msvc") {e.with_stderr("...[DIRTY]...");} else {e.with_stderr("...");}`. //! -//! # Normalization +//! ## Normalization //! //! In addition to the patterns described above, the strings are normalized //! in such a way to avoid unwanted differences. The normalizations are: @@ -36,17 +41,27 @@ //! a problem. //! - Carriage returns are removed, which can help when running on Windows. -use crate::diff; +use crate::cross_compile::try_alternate; use crate::paths; -use anyhow::{bail, Context, Result}; -use serde_json::Value; -use std::env; +use crate::{diff, rustc_host}; +use anyhow::{bail, Result}; use std::fmt; use std::path::Path; use std::str; use url::Url; -/// Default `snapbox` Assertions +/// This makes it easier to write regex replacements that are guaranteed to only +/// get compiled once +macro_rules! regex { + ($re:literal $(,)?) 
=> {{ + static RE: std::sync::OnceLock = std::sync::OnceLock::new(); + RE.get_or_init(|| regex::Regex::new($re).unwrap()) + }}; +} + +/// Assertion policy for UI tests +/// +/// This emphasizes showing as much content as possible at the cost of more brittleness /// /// # Snapshots /// @@ -75,28 +90,245 @@ use url::Url; /// Other heuristics are applied to try to ensure Windows-style paths aren't /// a problem. /// - Carriage returns are removed, which can help when running on Windows. +/// +/// # Example +/// +/// ```no_run +/// # use cargo_test_support::compare::assert_e2e; +/// # use cargo_test_support::file; +/// # let p = cargo_test_support::project().build(); +/// # let stdout = ""; +/// assert_e2e().eq(stdout, file!["stderr.term.svg"]); +/// ``` +/// ```console +/// $ SNAPSHOTS=overwrite cargo test +/// ``` pub fn assert_ui() -> snapbox::Assert { + let mut subs = snapbox::Redactions::new(); + subs.extend(MIN_LITERAL_REDACTIONS.into_iter().cloned()) + .unwrap(); + add_test_support_redactions(&mut subs); + add_regex_redactions(&mut subs); + + snapbox::Assert::new() + .action_env(snapbox::assert::DEFAULT_ACTION_ENV) + .redact_with(subs) +} + +/// Assertion policy for functional end-to-end tests +/// +/// This emphasizes showing as much content as possible at the cost of more brittleness +/// +/// # Snapshots +/// +/// Updating of snapshots is controlled with the `SNAPSHOTS` environment variable: +/// +/// - `skip`: do not run the tests +/// - `ignore`: run the tests but ignore their failure +/// - `verify`: run the tests +/// - `overwrite`: update the snapshots based on the output of the tests +/// +/// # Patterns +/// +/// - `[..]` is a character wildcard, stopping at line breaks +/// - `\n...\n` is a multi-line wildcard +/// - `[EXE]` matches the exe suffix for the current platform +/// - `[ROOT]` matches [`paths::root()`][crate::paths::root] +/// - `[ROOTURL]` matches [`paths::root()`][crate::paths::root] as a URL +/// +/// # Normalization +/// +/// In 
addition to the patterns described above, text is normalized +/// in such a way to avoid unwanted differences. The normalizations are: +/// +/// - Backslashes are converted to forward slashes to deal with Windows paths. +/// This helps so that all tests can be written assuming forward slashes. +/// Other heuristics are applied to try to ensure Windows-style paths aren't +/// a problem. +/// - Carriage returns are removed, which can help when running on Windows. +/// +/// # Example +/// +/// ```no_run +/// # use cargo_test_support::compare::assert_e2e; +/// # use cargo_test_support::str; +/// # let p = cargo_test_support::project().build(); +/// assert_e2e().eq(p.read_lockfile(), str![]); +/// ``` +/// ```console +/// $ SNAPSHOTS=overwrite cargo test +/// ``` +pub fn assert_e2e() -> snapbox::Assert { + let mut subs = snapbox::Redactions::new(); + subs.extend(MIN_LITERAL_REDACTIONS.into_iter().cloned()) + .unwrap(); + subs.extend(E2E_LITERAL_REDACTIONS.into_iter().cloned()) + .unwrap(); + add_test_support_redactions(&mut subs); + add_regex_redactions(&mut subs); + + snapbox::Assert::new() + .action_env(snapbox::assert::DEFAULT_ACTION_ENV) + .redact_with(subs) +} + +fn add_test_support_redactions(subs: &mut snapbox::Redactions) { let root = paths::root(); // Use `from_file_path` instead of `from_dir_path` so the trailing slash is // put in the users output, rather than hidden in the variable let root_url = url::Url::from_file_path(&root).unwrap().to_string(); - let root = root.display().to_string(); - let mut subs = snapbox::Substitutions::new(); - subs.extend([ - ( - "[EXE]", - std::borrow::Cow::Borrowed(std::env::consts::EXE_SUFFIX), - ), - ("[ROOT]", std::borrow::Cow::Owned(root)), - ("[ROOTURL]", std::borrow::Cow::Owned(root_url)), - ]) + subs.insert("[ROOT]", root).unwrap(); + subs.insert("[ROOTURL]", root_url).unwrap(); + subs.insert("[HOST_TARGET]", rustc_host()).unwrap(); + if let Some(alt_target) = try_alternate() { + subs.insert("[ALT_TARGET]", 
alt_target).unwrap(); + } +} + +fn add_regex_redactions(subs: &mut snapbox::Redactions) { + // For e2e tests + subs.insert( + "[ELAPSED]", + regex!(r"\[FINISHED\].*in (?[0-9]+(\.[0-9]+)?(m [0-9]+)?)s"), + ) .unwrap(); - snapbox::Assert::new() - .action_env(snapbox::DEFAULT_ACTION_ENV) - .substitutions(subs) + // for UI tests + subs.insert( + "[ELAPSED]", + regex!(r"Finished.*in (?[0-9]+(\.[0-9]+)?(m [0-9]+)?)s"), + ) + .unwrap(); + // output from libtest + subs.insert( + "[ELAPSED]", + regex!(r"; finished in (?[0-9]+(\.[0-9]+)?(m [0-9]+)?)s"), + ) + .unwrap(); + subs.insert( + "[FILE_NUM]", + regex!(r"\[(REMOVED|SUMMARY)\] (?[1-9][0-9]*) files"), + ) + .unwrap(); + subs.insert( + "[FILE_SIZE]", + regex!(r"(?[0-9]+(\.[0-9]+)?([a-zA-Z]i)?)B\s"), + ) + .unwrap(); + subs.insert( + "[HASH]", + regex!(r"home/\.cargo/registry/(cache|index|src)/-(?[a-z0-9]+)"), + ) + .unwrap(); + subs.insert( + "[HASH]", + regex!(r"\.cargo/target/(?[0-9a-f]{2}/[0-9a-f]{14})"), + ) + .unwrap(); + subs.insert("[HASH]", regex!(r"/[a-z0-9\-_]+-(?[0-9a-f]{16})")) + .unwrap(); + subs.insert( + "[AVG_ELAPSED]", + regex!(r"(?[0-9]+(\.[0-9]+)?) ns/iter"), + ) + .unwrap(); + subs.insert( + "[JITTER]", + regex!(r"ns/iter \(\+/- (?[0-9]+(\.[0-9]+)?)\)"), + ) + .unwrap(); + + // Following 3 subs redact: + // "1719325877.527949100s, 61549498ns after last build at 1719325877.466399602s" + // "1719503592.218193216s, 1h 1s after last build at 1719499991.982681034s" + // into "[DIRTY_REASON_NEW_TIME], [DIRTY_REASON_DIFF] after last build at [DIRTY_REASON_OLD_TIME]" + subs.insert( + "[TIME_DIFF_AFTER_LAST_BUILD]", + regex!(r"(?[0-9]+(\.[0-9]+)?s, (\s?[0-9]+(\.[0-9]+)?(s|ns|h))+ after last build at [0-9]+(\.[0-9]+)?s)"), + ) + .unwrap(); } +static MIN_LITERAL_REDACTIONS: &[(&str, &str)] = &[ + ("[EXE]", std::env::consts::EXE_SUFFIX), + ("[BROKEN_PIPE]", "Broken pipe (os error 32)"), + ("[BROKEN_PIPE]", "The pipe is being closed. 
(os error 232)"), + // Unix message for an entity was not found + ("[NOT_FOUND]", "No such file or directory (os error 2)"), + // Windows message for an entity was not found + ( + "[NOT_FOUND]", + "The system cannot find the file specified. (os error 2)", + ), + ( + "[NOT_FOUND]", + "The system cannot find the path specified. (os error 3)", + ), + ("[NOT_FOUND]", "Access is denied. (os error 5)"), + ("[NOT_FOUND]", "program not found"), + // Unix message for exit status + ("[EXIT_STATUS]", "exit status"), + // Windows message for exit status + ("[EXIT_STATUS]", "exit code"), +]; +static E2E_LITERAL_REDACTIONS: &[(&str, &str)] = &[ + ("[RUNNING]", " Running"), + ("[COMPILING]", " Compiling"), + ("[CHECKING]", " Checking"), + ("[COMPLETED]", " Completed"), + ("[CREATED]", " Created"), + ("[CREATING]", " Creating"), + ("[CREDENTIAL]", " Credential"), + ("[DOWNGRADING]", " Downgrading"), + ("[FINISHED]", " Finished"), + ("[ERROR]", "error:"), + ("[WARNING]", "warning:"), + ("[NOTE]", "note:"), + ("[HELP]", "help:"), + ("[DOCUMENTING]", " Documenting"), + ("[SCRAPING]", " Scraping"), + ("[FRESH]", " Fresh"), + ("[DIRTY]", " Dirty"), + ("[LOCKING]", " Locking"), + ("[UPDATING]", " Updating"), + ("[UPGRADING]", " Upgrading"), + ("[ADDING]", " Adding"), + ("[REMOVING]", " Removing"), + ("[REMOVED]", " Removed"), + ("[UNCHANGED]", " Unchanged"), + ("[DOCTEST]", " Doc-tests"), + ("[PACKAGING]", " Packaging"), + ("[PACKAGED]", " Packaged"), + ("[DOWNLOADING]", " Downloading"), + ("[DOWNLOADED]", " Downloaded"), + ("[UPLOADING]", " Uploading"), + ("[UPLOADED]", " Uploaded"), + ("[VERIFYING]", " Verifying"), + ("[ARCHIVING]", " Archiving"), + ("[INSTALLING]", " Installing"), + ("[REPLACING]", " Replacing"), + ("[UNPACKING]", " Unpacking"), + ("[SUMMARY]", " Summary"), + ("[FIXED]", " Fixed"), + ("[FIXING]", " Fixing"), + ("[IGNORED]", " Ignored"), + ("[INSTALLED]", " Installed"), + ("[REPLACED]", " Replaced"), + ("[BUILDING]", " Building"), + ("[LOGIN]", " Login"), + 
("[LOGOUT]", " Logout"), + ("[YANK]", " Yank"), + ("[OWNER]", " Owner"), + ("[MIGRATING]", " Migrating"), + ("[EXECUTABLE]", " Executable"), + ("[SKIPPING]", " Skipping"), + ("[WAITING]", " Waiting"), + ("[PUBLISHED]", " Published"), + ("[BLOCKING]", " Blocking"), + ("[GENERATED]", " Generated"), + ("[OPENING]", " Opening"), +]; + /// Normalizes the output so that it can be compared against the expected value. fn normalize_actual(actual: &str, cwd: Option<&Path>) -> String { // It's easier to read tabs in outputs if they don't show up as literal @@ -186,64 +418,11 @@ fn normalize_windows(text: &str, cwd: Option<&Path>) -> String { } fn substitute_macros(input: &str) -> String { - let macros = [ - ("[RUNNING]", " Running"), - ("[COMPILING]", " Compiling"), - ("[CHECKING]", " Checking"), - ("[COMPLETED]", " Completed"), - ("[CREATED]", " Created"), - ("[CREATING]", " Creating"), - ("[CREDENTIAL]", " Credential"), - ("[DOWNGRADING]", " Downgrading"), - ("[FINISHED]", " Finished"), - ("[ERROR]", "error:"), - ("[WARNING]", "warning:"), - ("[NOTE]", "note:"), - ("[HELP]", "help:"), - ("[DOCUMENTING]", " Documenting"), - ("[SCRAPING]", " Scraping"), - ("[FRESH]", " Fresh"), - ("[DIRTY]", " Dirty"), - ("[LOCKING]", " Locking"), - ("[UPDATING]", " Updating"), - ("[ADDING]", " Adding"), - ("[REMOVING]", " Removing"), - ("[REMOVED]", " Removed"), - ("[UNCHANGED]", " Unchanged"), - ("[DOCTEST]", " Doc-tests"), - ("[PACKAGING]", " Packaging"), - ("[PACKAGED]", " Packaged"), - ("[DOWNLOADING]", " Downloading"), - ("[DOWNLOADED]", " Downloaded"), - ("[UPLOADING]", " Uploading"), - ("[UPLOADED]", " Uploaded"), - ("[VERIFYING]", " Verifying"), - ("[ARCHIVING]", " Archiving"), - ("[INSTALLING]", " Installing"), - ("[REPLACING]", " Replacing"), - ("[UNPACKING]", " Unpacking"), - ("[SUMMARY]", " Summary"), - ("[FIXED]", " Fixed"), - ("[FIXING]", " Fixing"), - ("[EXE]", env::consts::EXE_SUFFIX), - ("[IGNORED]", " Ignored"), - ("[INSTALLED]", " Installed"), - ("[REPLACED]", " 
Replaced"), - ("[BUILDING]", " Building"), - ("[LOGIN]", " Login"), - ("[LOGOUT]", " Logout"), - ("[YANK]", " Yank"), - ("[OWNER]", " Owner"), - ("[MIGRATING]", " Migrating"), - ("[EXECUTABLE]", " Executable"), - ("[SKIPPING]", " Skipping"), - ("[WAITING]", " Waiting"), - ("[PUBLISHED]", " Published"), - ("[BLOCKING]", " Blocking"), - ("[GENERATED]", " Generated"), - ]; let mut result = input.to_owned(); - for &(pat, subst) in ¯os { + for &(pat, subst) in MIN_LITERAL_REDACTIONS { + result = result.replace(pat, subst) + } + for &(pat, subst) in E2E_LITERAL_REDACTIONS { result = result.replace(pat, subst) } result @@ -255,7 +434,7 @@ fn substitute_macros(input: &str) -> String { /// /// - `description` explains where the output is from (usually "stdout" or "stderr"). /// - `other_output` is other output to display in the error (usually stdout or stderr). -pub fn match_exact( +pub(crate) fn match_exact( expected: &str, actual: &str, description: &str, @@ -283,7 +462,7 @@ pub fn match_exact( /// Convenience wrapper around [`match_exact`] which will panic on error. #[track_caller] -pub fn assert_match_exact(expected: &str, actual: &str) { +pub(crate) fn assert_match_exact(expected: &str, actual: &str) { if let Err(e) = match_exact(expected, actual, "", "", None) { crate::panic_error("", e); } @@ -293,7 +472,7 @@ pub fn assert_match_exact(expected: &str, actual: &str) { /// of the lines. /// /// See [Patterns](index.html#patterns) for more information on pattern matching. -pub fn match_unordered(expected: &str, actual: &str, cwd: Option<&Path>) -> Result<()> { +pub(crate) fn match_unordered(expected: &str, actual: &str, cwd: Option<&Path>) -> Result<()> { let expected = normalize_expected(expected, cwd); let actual = normalize_actual(actual, cwd); let e: Vec<_> = expected.lines().map(|line| WildStr::new(line)).collect(); @@ -343,7 +522,7 @@ pub fn match_unordered(expected: &str, actual: &str, cwd: Option<&Path>) -> Resu /// somewhere. 
/// /// See [Patterns](index.html#patterns) for more information on pattern matching. -pub fn match_contains(expected: &str, actual: &str, cwd: Option<&Path>) -> Result<()> { +pub(crate) fn match_contains(expected: &str, actual: &str, cwd: Option<&Path>) -> Result<()> { let expected = normalize_expected(expected, cwd); let actual = normalize_actual(actual, cwd); let e: Vec<_> = expected.lines().map(|line| WildStr::new(line)).collect(); @@ -370,7 +549,11 @@ pub fn match_contains(expected: &str, actual: &str, cwd: Option<&Path>) -> Resul /// anywhere. /// /// See [Patterns](index.html#patterns) for more information on pattern matching. -pub fn match_does_not_contain(expected: &str, actual: &str, cwd: Option<&Path>) -> Result<()> { +pub(crate) fn match_does_not_contain( + expected: &str, + actual: &str, + cwd: Option<&Path>, +) -> Result<()> { if match_contains(expected, actual, cwd).is_ok() { bail!( "expected not to find:\n\ @@ -389,7 +572,7 @@ pub fn match_does_not_contain(expected: &str, actual: &str, cwd: Option<&Path>) /// somewhere, and should be repeated `number` times. /// /// See [Patterns](index.html#patterns) for more information on pattern matching. -pub fn match_contains_n( +pub(crate) fn match_contains_n( expected: &str, number: usize, actual: &str, @@ -426,7 +609,7 @@ pub fn match_contains_n( /// /// See [`crate::Execs::with_stderr_line_without`] for an example and cautions /// against using. -pub fn match_with_without( +pub(crate) fn match_with_without( actual: &str, with: &[String], without: &[String], @@ -470,157 +653,8 @@ pub fn match_with_without( } } -/// Checks that the given string of JSON objects match the given set of -/// expected JSON objects. -/// -/// See [`crate::Execs::with_json`] for more details. 
-pub fn match_json(expected: &str, actual: &str, cwd: Option<&Path>) -> Result<()> { - let (exp_objs, act_objs) = collect_json_objects(expected, actual)?; - if exp_objs.len() != act_objs.len() { - bail!( - "expected {} json lines, got {}, stdout:\n{}", - exp_objs.len(), - act_objs.len(), - actual - ); - } - for (exp_obj, act_obj) in exp_objs.iter().zip(act_objs) { - find_json_mismatch(exp_obj, &act_obj, cwd)?; - } - Ok(()) -} - -/// Checks that the given string of JSON objects match the given set of -/// expected JSON objects, ignoring their order. -/// -/// See [`crate::Execs::with_json_contains_unordered`] for more details and -/// cautions when using. -pub fn match_json_contains_unordered( - expected: &str, - actual: &str, - cwd: Option<&Path>, -) -> Result<()> { - let (exp_objs, mut act_objs) = collect_json_objects(expected, actual)?; - for exp_obj in exp_objs { - match act_objs - .iter() - .position(|act_obj| find_json_mismatch(&exp_obj, act_obj, cwd).is_ok()) - { - Some(index) => act_objs.remove(index), - None => { - bail!( - "Did not find expected JSON:\n\ - {}\n\ - Remaining available output:\n\ - {}\n", - serde_json::to_string_pretty(&exp_obj).unwrap(), - itertools::join( - act_objs.iter().map(|o| serde_json::to_string(o).unwrap()), - "\n" - ) - ); - } - }; - } - Ok(()) -} - -fn collect_json_objects( - expected: &str, - actual: &str, -) -> Result<(Vec, Vec)> { - let expected_objs: Vec<_> = expected - .split("\n\n") - .map(|expect| { - expect - .parse() - .with_context(|| format!("failed to parse expected JSON object:\n{}", expect)) - }) - .collect::>()?; - let actual_objs: Vec<_> = actual - .lines() - .filter(|line| line.starts_with('{')) - .map(|line| { - line.parse() - .with_context(|| format!("failed to parse JSON object:\n{}", line)) - }) - .collect::>()?; - Ok((expected_objs, actual_objs)) -} - -/// Compares JSON object for approximate equality. -/// You can use `[..]` wildcard in strings (useful for OS-dependent things such -/// as paths). 
You can use a `"{...}"` string literal as a wildcard for -/// arbitrary nested JSON (useful for parts of object emitted by other programs -/// (e.g., rustc) rather than Cargo itself). -pub fn find_json_mismatch(expected: &Value, actual: &Value, cwd: Option<&Path>) -> Result<()> { - match find_json_mismatch_r(expected, actual, cwd) { - Some((expected_part, actual_part)) => bail!( - "JSON mismatch\nExpected:\n{}\nWas:\n{}\nExpected part:\n{}\nActual part:\n{}\n", - serde_json::to_string_pretty(expected).unwrap(), - serde_json::to_string_pretty(&actual).unwrap(), - serde_json::to_string_pretty(expected_part).unwrap(), - serde_json::to_string_pretty(actual_part).unwrap(), - ), - None => Ok(()), - } -} - -fn find_json_mismatch_r<'a>( - expected: &'a Value, - actual: &'a Value, - cwd: Option<&Path>, -) -> Option<(&'a Value, &'a Value)> { - use serde_json::Value::*; - match (expected, actual) { - (&Number(ref l), &Number(ref r)) if l == r => None, - (&Bool(l), &Bool(r)) if l == r => None, - (&String(ref l), _) if l == "{...}" => None, - (&String(ref l), &String(ref r)) => { - if match_exact(l, r, "", "", cwd).is_err() { - Some((expected, actual)) - } else { - None - } - } - (&Array(ref l), &Array(ref r)) => { - if l.len() != r.len() { - return Some((expected, actual)); - } - - l.iter() - .zip(r.iter()) - .filter_map(|(l, r)| find_json_mismatch_r(l, r, cwd)) - .next() - } - (&Object(ref l), &Object(ref r)) => { - let mut expected_entries = l.iter(); - let mut actual_entries = r.iter(); - - loop { - match (expected_entries.next(), actual_entries.next()) { - (None, None) => return None, - (Some((expected_key, expected_value)), Some((actual_key, actual_value))) - if expected_key == actual_key => - { - if let mismatch @ Some(_) = - find_json_mismatch_r(expected_value, actual_value, cwd) - { - return mismatch; - } - } - _ => return Some((expected, actual)), - } - } - } - (&Null, &Null) => None, - // Magic string literal `"{...}"` acts as wildcard for any sub-JSON. 
- _ => Some((expected, actual)), - } -} - /// A single line string that supports `[..]` wildcard matching. -pub struct WildStr<'a> { +pub(crate) struct WildStr<'a> { has_meta: bool, line: &'a str, } @@ -672,129 +706,153 @@ impl fmt::Debug for WildStr<'_> { } } -#[test] -fn wild_str_cmp() { - for (a, b) in &[ - ("a b", "a b"), - ("a[..]b", "a b"), - ("a[..]", "a b"), - ("[..]", "a b"), - ("[..]b", "a b"), - ] { - assert_eq!(WildStr::new(a), WildStr::new(b)); - } - for (a, b) in &[("[..]b", "c"), ("b", "c"), ("b", "cb")] { - assert_ne!(WildStr::new(a), WildStr::new(b)); +#[cfg(test)] +mod test { + use snapbox::assert_data_eq; + use snapbox::prelude::*; + use snapbox::str; + + use super::*; + + #[test] + fn wild_str_cmp() { + for (a, b) in &[ + ("a b", "a b"), + ("a[..]b", "a b"), + ("a[..]", "a b"), + ("[..]", "a b"), + ("[..]b", "a b"), + ] { + assert_eq!(WildStr::new(a), WildStr::new(b)); + } + for (a, b) in &[("[..]b", "c"), ("b", "c"), ("b", "cb")] { + assert_ne!(WildStr::new(a), WildStr::new(b)); + } } -} -#[test] -fn dirty_msvc() { - let case = |expected: &str, wild: &str, msvc: bool| { - assert_eq!(expected, &replace_dirty_msvc_impl(wild, msvc)); - }; + #[test] + fn dirty_msvc() { + let case = |expected: &str, wild: &str, msvc: bool| { + assert_eq!(expected, &replace_dirty_msvc_impl(wild, msvc)); + }; - // no replacements - case("aa", "aa", false); - case("aa", "aa", true); + // no replacements + case("aa", "aa", false); + case("aa", "aa", true); - // with replacements - case( - "\ + // with replacements + case( + "\ [DIRTY] a", - "\ + "\ [DIRTY-MSVC] a", - true, - ); - case( - "", - "\ + true, + ); + case( + "", + "\ [DIRTY-MSVC] a", - false, - ); - case( - "\ + false, + ); + case( + "\ [DIRTY] a [COMPILING] a", - "\ + "\ [DIRTY-MSVC] a [COMPILING] a", - true, - ); - case( - "\ + true, + ); + case( + "\ [COMPILING] a", - "\ + "\ [DIRTY-MSVC] a [COMPILING] a", - false, - ); + false, + ); - // test trailing newline behavior - case( - "\ + // test trailing 
newline behavior + case( + "\ A B ", "\ A B ", true, - ); + ); - case( - "\ + case( + "\ A B ", "\ A B ", false, - ); + ); - case( - "\ + case( + "\ A B", "\ A B", true, - ); + ); - case( - "\ + case( + "\ A B", "\ A B", false, - ); + ); - case( - "\ + case( + "\ [DIRTY] a ", - "\ + "\ [DIRTY-MSVC] a ", - true, - ); - case( - "\n", - "\ + true, + ); + case( + "\n", + "\ [DIRTY-MSVC] a ", - false, - ); + false, + ); - case( - "\ + case( + "\ [DIRTY] a", - "\ + "\ [DIRTY-MSVC] a", - true, - ); - case( - "", - "\ + true, + ); + case( + "", + "\ [DIRTY-MSVC] a", - false, - ); + false, + ); + } + + #[test] + fn redact_elapsed_time() { + let mut subs = snapbox::Redactions::new(); + add_regex_redactions(&mut subs); + + assert_data_eq!( + subs.redact("[FINISHED] `release` profile [optimized] target(s) in 5.5s"), + str!["[FINISHED] `release` profile [optimized] target(s) in [ELAPSED]s"].raw() + ); + assert_data_eq!( + subs.redact("[FINISHED] `release` profile [optimized] target(s) in 1m 05s"), + str!["[FINISHED] `release` profile [optimized] target(s) in [ELAPSED]s"].raw() + ); + } } diff --git a/crates/cargo-test-support/src/containers.rs b/crates/cargo-test-support/src/containers.rs index 22fd5fd855e..4ce6e1d5048 100644 --- a/crates/cargo-test-support/src/containers.rs +++ b/crates/cargo-test-support/src/containers.rs @@ -122,6 +122,7 @@ impl Container { return; } let mut ar = tar::Builder::new(Vec::new()); + ar.sparse(false); let files = std::mem::replace(&mut self.files, Vec::new()); for mut file in files { ar.append_data(&mut file.header, &file.path, file.contents.as_slice()) diff --git a/crates/cargo-test-support/src/cross_compile.rs b/crates/cargo-test-support/src/cross_compile.rs index a2daf882d9d..beda6616550 100644 --- a/crates/cargo-test-support/src/cross_compile.rs +++ b/crates/cargo-test-support/src/cross_compile.rs @@ -209,18 +209,23 @@ pub fn native_arch() -> &'static str { /// /// Only use this function on tests that check `cross_compile::disabled`. 
pub fn alternate() -> &'static str { + try_alternate().expect("This test should be gated on cross_compile::disabled.") +} + +/// A possible alternate target-triple to build with. +pub(crate) fn try_alternate() -> Option<&'static str> { if cfg!(all(target_os = "macos", target_arch = "aarch64")) { - "x86_64-apple-darwin" + Some("x86_64-apple-darwin") } else if cfg!(target_os = "macos") { - "x86_64-apple-ios" + Some("x86_64-apple-ios") } else if cfg!(target_os = "linux") { - "i686-unknown-linux-gnu" + Some("i686-unknown-linux-gnu") } else if cfg!(all(target_os = "windows", target_env = "msvc")) { - "i686-pc-windows-msvc" + Some("i686-pc-windows-msvc") } else if cfg!(all(target_os = "windows", target_env = "gnu")) { - "i686-pc-windows-gnu" + Some("i686-pc-windows-gnu") } else { - panic!("This test should be gated on cross_compile::disabled."); + None } } diff --git a/crates/cargo-test-support/src/git.rs b/crates/cargo-test-support/src/git.rs index 2b838e8c991..803e4dafd49 100644 --- a/crates/cargo-test-support/src/git.rs +++ b/crates/cargo-test-support/src/git.rs @@ -1,60 +1,66 @@ -/* -# Git Testing Support - -## Creating a git dependency -`git::new()` is an easy way to create a new git repository containing a -project that you can then use as a dependency. It will automatically add all -the files you specify in the project and commit them to the repository. -Example: - -``` -let git_project = git::new("dep1", |project| { - project - .file("Cargo.toml", &basic_manifest("dep1", "1.0.0")) - .file("src/lib.rs", r#"pub fn f() { println!("hi!"); } "#) -}); - -// Use the `url()` method to get the file url to the new repository. 
-let p = project() - .file("Cargo.toml", &format!(r#" - [package] - name = "a" - version = "1.0.0" - - [dependencies] - dep1 = {{ git = '{}' }} - "#, git_project.url())) - .file("src/lib.rs", "extern crate dep1;") - .build(); -``` - -## Manually creating repositories -`git::repo()` can be used to create a `RepoBuilder` which provides a way of -adding files to a blank repository and committing them. - -If you want to then manipulate the repository (such as adding new files or -tags), you can use `git2::Repository::open()` to open the repository and then -use some of the helper functions in this file to interact with the repository. - -*/ - -use crate::{path2url, project, Project, ProjectBuilder, SymlinkBuilder}; +//! # Git Testing Support +//! +//! ## Creating a git dependency +//! [`new()`] is an easy way to create a new git repository containing a +//! project that you can then use as a dependency. It will automatically add all +//! the files you specify in the project and commit them to the repository. +//! +//! ### Example: +//! +//! ```no_run +//! # use cargo_test_support::project; +//! # use cargo_test_support::basic_manifest; +//! # use cargo_test_support::git; +//! let git_project = git::new("dep1", |project| { +//! project +//! .file("Cargo.toml", &basic_manifest("dep1", "1.0.0")) +//! .file("src/lib.rs", r#"pub fn f() { println!("hi!"); } "#) +//! }); +//! +//! // Use the `url()` method to get the file url to the new repository. +//! let p = project() +//! .file("Cargo.toml", &format!(r#" +//! [package] +//! name = "a" +//! version = "1.0.0" +//! +//! [dependencies] +//! dep1 = {{ git = '{}' }} +//! "#, git_project.url())) +//! .file("src/lib.rs", "extern crate dep1;") +//! .build(); +//! ``` +//! +//! ## Manually creating repositories +//! +//! [`repo()`] can be used to create a [`RepoBuilder`] which provides a way of +//! adding files to a blank repository and committing them. +//! +//! 
If you want to then manipulate the repository (such as adding new files or +//! tags), you can use `git2::Repository::open()` to open the repository and then +//! use some of the helper functions in this file to interact with the repository. + +use crate::{paths::CargoPathExt, project, Project, ProjectBuilder, SymlinkBuilder}; use std::fs; use std::path::{Path, PathBuf}; use std::sync::Once; use url::Url; +/// Manually construct a [`Repository`] +/// +/// See also [`new`], [`repo`] #[must_use] pub struct RepoBuilder { repo: git2::Repository, files: Vec, } +/// See [`new`] pub struct Repository(git2::Repository); -/// Create a `RepoBuilder` to build a new git repository. +/// Create a [`RepoBuilder`] to build a new git repository. /// -/// Call `build()` to finalize and create the repository. +/// Call [`RepoBuilder::build()`] to finalize and create the repository. pub fn repo(p: &Path) -> RepoBuilder { RepoBuilder::init(p) } @@ -118,7 +124,7 @@ impl Repository { } pub fn url(&self) -> Url { - path2url(self.0.workdir().unwrap().to_path_buf()) + self.0.workdir().unwrap().to_url() } pub fn revparse_head(&self) -> String { @@ -130,7 +136,7 @@ impl Repository { } } -/// Initialize a new repository at the given path. +/// *(`git2`)* Initialize a new repository at the given path. pub fn init(path: &Path) -> git2::Repository { default_search_path(); let repo = t!(git2::Repository::init(path)); @@ -158,7 +164,7 @@ fn default_repo_cfg(repo: &git2::Repository) { t!(cfg.set_str("user.name", "Foo Bar")); } -/// Create a new git repository with a project. +/// Create a new [`Project`] in a git [`Repository`] pub fn new(name: &str, callback: F) -> Project where F: FnOnce(ProjectBuilder) -> ProjectBuilder, @@ -166,8 +172,7 @@ where new_repo(name, callback).0 } -/// Create a new git repository with a project. -/// Returns both the Project and the git Repository. 
+/// Create a new [`Project`] with access to the [`Repository`] pub fn new_repo(name: &str, callback: F) -> (Project, git2::Repository) where F: FnOnce(ProjectBuilder) -> ProjectBuilder, @@ -182,14 +187,14 @@ where (git_project, repo) } -/// Add all files in the working directory to the git index. +/// *(`git2`)* Add all files in the working directory to the git index pub fn add(repo: &git2::Repository) { let mut index = t!(repo.index()); t!(index.add_all(["*"].iter(), git2::IndexAddOption::DEFAULT, None)); t!(index.write()); } -/// Add a git submodule to the repository. +/// *(`git2`)* Add a git submodule to the repository pub fn add_submodule<'a>( repo: &'a git2::Repository, url: &str, @@ -207,7 +212,7 @@ pub fn add_submodule<'a>( s } -/// Commit changes to the git repository. +/// *(`git2`)* Commit changes to the git repository pub fn commit(repo: &git2::Repository) -> git2::Oid { let tree_id = t!(t!(repo.index()).write_tree()); let sig = t!(repo.signature()); @@ -226,7 +231,7 @@ pub fn commit(repo: &git2::Repository) -> git2::Oid { )) } -/// Create a new tag in the git repository. +/// *(`git2`)* Create a new tag in the git repository pub fn tag(repo: &git2::Repository, name: &str) { let head = repo.head().unwrap().target().unwrap(); t!(repo.tag( diff --git a/crates/cargo-test-support/src/install.rs b/crates/cargo-test-support/src/install.rs index 02842ef7bba..3a8503d75d8 100644 --- a/crates/cargo-test-support/src/install.rs +++ b/crates/cargo-test-support/src/install.rs @@ -1,14 +1,15 @@ -use crate::paths; +//! Helpers for testing `cargo install` + use std::env::consts::EXE_SUFFIX; -use std::path::{Path, PathBuf}; +use std::path::Path; /// Used by `cargo install` tests to assert an executable binary /// has been installed. 
Example usage: /// ```no_run /// use cargo_test_support::install::assert_has_installed_exe; -/// use cargo_test_support::install::cargo_home; +/// use cargo_test_support::paths; /// -/// assert_has_installed_exe(cargo_home(), "foo"); +/// assert_has_installed_exe(paths::cargo_home(), "foo"); /// ``` #[track_caller] pub fn assert_has_installed_exe>(path: P, name: &'static str) { @@ -24,10 +25,7 @@ fn check_has_installed_exe>(path: P, name: &'static str) -> bool path.as_ref().join("bin").join(exe(name)).is_file() } -pub fn cargo_home() -> PathBuf { - paths::home().join(".cargo") -} - +/// `$name$EXE` pub fn exe(name: &str) -> String { format!("{}{}", name, EXE_SUFFIX) } diff --git a/crates/cargo-test-support/src/lib.rs b/crates/cargo-test-support/src/lib.rs index a02f8293380..476e185dd73 100644 --- a/crates/cargo-test-support/src/lib.rs +++ b/crates/cargo-test-support/src/lib.rs @@ -2,11 +2,42 @@ //! //! See for a guide on writing tests. //! -//! WARNING: You might not want to use this outside of Cargo. +//! There are two places you can find API documentation //! -//! * This is designed for testing Cargo itself. Use at your own risk. -//! * No guarantee on any stability across versions. -//! * No feature request would be accepted unless proved useful for testing Cargo. +//! - : +//! targeted at external tool developers testing cargo-related code +//! - Released with every rustc release +//! - : +//! targeted at cargo contributors +//! - Updated on each update of the `cargo` submodule in `rust-lang/rust` +//! +//! > This crate is maintained by the Cargo team, primarily for use by Cargo +//! > and not intended for external use. This +//! > crate may make major changes to its APIs or be deprecated without warning. +//! +//! # Example +//! +//! ```rust,no_run +//! use cargo_test_support::prelude::*; +//! use cargo_test_support::str; +//! use cargo_test_support::project; +//! +//! #[cargo_test] +//! fn some_test() { +//! let p = project() +//! 
.file("src/main.rs", r#"fn main() { println!("hi!"); }"#) +//! .build(); +//! +//! p.cargo("run --bin foo") +//! .with_stderr_data(str![[r#" +//! [COMPILING] foo [..] +//! [FINISHED] [..] +//! [RUNNING] `target/debug/foo` +//! "#]]) +//! .with_stdout_data(str![["hi!"]]) +//! .run(); +//! } +//! ``` #![allow(clippy::disallowed_methods)] #![allow(clippy::print_stderr)] @@ -24,11 +55,20 @@ use std::thread::JoinHandle; use std::time::{self, Duration}; use anyhow::{bail, Result}; -use cargo_util::{is_ci, ProcessBuilder, ProcessError}; +use cargo_util::{is_ci, ProcessError}; +use snapbox::IntoData as _; use url::Url; use self::paths::CargoPathExt; +/// Unwrap a `Result` with a useful panic message +/// +/// # Example +/// +/// ```rust +/// use cargo_test_support::t; +/// t!(std::fs::read_to_string("Cargo.toml")); +/// ``` #[macro_export] macro_rules! t { ($e:expr) => { @@ -39,10 +79,12 @@ macro_rules! t { }; } +pub use cargo_util::ProcessBuilder; pub use snapbox::file; -pub use snapbox::path::current_dir; pub use snapbox::str; +pub use snapbox::utils::current_dir; +/// `panic!`, reporting the specified error , see also [`t!`] #[track_caller] pub fn panic_error(what: &str, err: impl Into) -> ! 
{ let err = err.into(); @@ -72,10 +114,13 @@ pub mod registry; pub mod tools; pub mod prelude { - pub use crate::ArgLine; - pub use crate::CargoCommand; - pub use crate::ChannelChanger; - pub use crate::TestEnv; + pub use crate::cargo_test; + pub use crate::paths::CargoPathExt; + pub use crate::ArgLineCommandExt; + pub use crate::CargoCommandExt; + pub use crate::ChannelChangerCommandExt; + pub use crate::TestEnvCommandExt; + pub use snapbox::IntoData; } /* @@ -186,7 +231,13 @@ pub struct Project { /// Create a project to run tests against /// -/// The project can be constructed programmatically or from the filesystem with [`Project::from_template`] +/// - Creates a [`basic_manifest`] if one isn't supplied +/// +/// To get started, see: +/// - [`project`] +/// - [`project_in`] +/// - [`project_in_home`] +/// - [`Project::from_template`] #[must_use] pub struct ProjectBuilder { root: Project, @@ -196,16 +247,21 @@ pub struct ProjectBuilder { } impl ProjectBuilder { - /// Root of the project, ex: `/path/to/cargo/target/cit/t0/foo` + /// Root of the project + /// + /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/foo` pub fn root(&self) -> PathBuf { self.root.root() } - /// Project's debug dir, ex: `/path/to/cargo/target/cit/t0/foo/target/debug` + /// Project's debug dir + /// + /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/foo/target/debug` pub fn target_debug_dir(&self) -> PathBuf { self.root.target_debug_dir() } + /// Create project in `root` pub fn new(root: PathBuf) -> ProjectBuilder { ProjectBuilder { root: Project { root }, @@ -215,6 +271,7 @@ impl ProjectBuilder { } } + /// Create project, relative to [`paths::root`] pub fn at>(mut self, path: P) -> Self { self.root = Project { root: paths::root().join(path), @@ -315,33 +372,44 @@ impl Project { pub fn from_template(template_path: impl AsRef) -> Self { let root = paths::root(); let project_root = root.join("case"); - snapbox::path::copy_template(template_path.as_ref(), &project_root).unwrap(); + 
snapbox::dir::copy_template(template_path.as_ref(), &project_root).unwrap(); Self { root: project_root } } - /// Root of the project, ex: `/path/to/cargo/target/cit/t0/foo` + /// Root of the project + /// + /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/foo` pub fn root(&self) -> PathBuf { self.root.clone() } - /// Project's target dir, ex: `/path/to/cargo/target/cit/t0/foo/target` + /// Project's target dir + /// + /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/foo/target` pub fn build_dir(&self) -> PathBuf { self.root().join("target") } - /// Project's debug dir, ex: `/path/to/cargo/target/cit/t0/foo/target/debug` + /// Project's debug dir + /// + /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/foo/target/debug` pub fn target_debug_dir(&self) -> PathBuf { self.build_dir().join("debug") } - /// File url for root, ex: `file:///path/to/cargo/target/cit/t0/foo` + /// File url for root + /// + /// ex: `file://$CARGO_TARGET_TMPDIR/cit/t0/foo` pub fn url(&self) -> Url { - path2url(self.root()) + use paths::CargoPathExt; + self.root().to_url() } /// Path to an example built as a library. + /// /// `kind` should be one of: "lib", "rlib", "staticlib", "dylib", "proc-macro" - /// ex: `/path/to/cargo/target/cit/t0/foo/target/debug/examples/libex.rlib` + /// + /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/foo/target/debug/examples/libex.rlib` pub fn example_lib(&self, name: &str, kind: &str) -> PathBuf { self.target_debug_dir() .join("examples") @@ -349,7 +417,8 @@ impl Project { } /// Path to a debug binary. - /// ex: `/path/to/cargo/target/cit/t0/foo/target/debug/foo` + /// + /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/foo/target/debug/foo` pub fn bin(&self, b: &str) -> PathBuf { self.build_dir() .join("debug") @@ -357,7 +426,8 @@ impl Project { } /// Path to a release binary. 
- /// ex: `/path/to/cargo/target/cit/t0/foo/target/release/foo` + /// + /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/foo/target/release/foo` pub fn release_bin(&self, b: &str) -> PathBuf { self.build_dir() .join("release") @@ -365,7 +435,8 @@ impl Project { } /// Path to a debug binary for a specific target triple. - /// ex: `/path/to/cargo/target/cit/t0/foo/target/i686-apple-darwin/debug/foo` + /// + /// ex: `$CARGO_TARGET_TMPDIR/cit/t0/foo/target/i686-apple-darwin/debug/foo` pub fn target_bin(&self, target: &str, b: &str) -> PathBuf { self.build_dir().join(target).join("debug").join(&format!( "{}{}", @@ -374,25 +445,37 @@ impl Project { )) } - /// Returns an iterator of paths matching the glob pattern, which is - /// relative to the project root. + /// Returns an iterator of paths within [`Project::root`] matching the glob pattern pub fn glob>(&self, pattern: P) -> glob::Paths { let pattern = self.root().join(pattern); glob::glob(pattern.to_str().expect("failed to convert pattern to str")) .expect("failed to glob") } - /// Changes the contents of an existing file. + /// Overwrite a file with new content + /// + // # Example: + /// + /// ```no_run + /// # let p = cargo_test_support::project().build(); + /// p.change_file("src/lib.rs", "fn new_fn() {}"); + /// ``` pub fn change_file(&self, path: &str, body: &str) { FileBuilder::new(self.root().join(path), body, false).mk() } /// Creates a `ProcessBuilder` to run a program in the project /// and wrap it in an Execs to assert on the execution. 
- /// Example: - /// p.process(&p.bin("foo")) - /// .with_stdout("bar\n") - /// .run(); + /// + /// # Example: + /// + /// ```no_run + /// # use cargo_test_support::str; + /// # let p = cargo_test_support::project().build(); + /// p.process(&p.bin("foo")) + /// .with_stdout_data(str!["bar\n"]) + /// .run(); + /// ``` pub fn process>(&self, program: T) -> Execs { let mut p = process(program); p.cwd(self.root()); @@ -400,9 +483,17 @@ impl Project { } /// Creates a `ProcessBuilder` to run cargo. + /// /// Arguments can be separated by spaces. - /// Example: - /// p.cargo("build --bin foo").run(); + /// + /// For `cargo run`, see [`Project::rename_run`]. + /// + /// # Example: + /// + /// ```no_run + /// # let p = cargo_test_support::project().build(); + /// p.cargo("build --bin foo").run(); + /// ``` pub fn cargo(&self, cmd: &str) -> Execs { let cargo = cargo_exe(); let mut execs = self.process(&cargo); @@ -476,27 +567,41 @@ impl Project { } } -// Generates a project layout +/// Generates a project layout, see [`ProjectBuilder`] pub fn project() -> ProjectBuilder { ProjectBuilder::new(paths::root().join("foo")) } -// Generates a project layout in given directory +/// Generates a project layout in given directory, see [`ProjectBuilder`] pub fn project_in(dir: &str) -> ProjectBuilder { ProjectBuilder::new(paths::root().join(dir).join("foo")) } -// Generates a project layout inside our fake home dir +/// Generates a project layout inside our fake home dir, see [`ProjectBuilder`] pub fn project_in_home(name: &str) -> ProjectBuilder { ProjectBuilder::new(paths::home().join(name)) } // === Helpers === -pub fn main_file(println: &str, deps: &[&str]) -> String { +/// Generate a `main.rs` printing the specified text +/// +/// ```rust +/// # use cargo_test_support::main_file; +/// # mod dep { +/// # fn bar() -> &'static str { +/// # "world" +/// # } +/// # } +/// main_file( +/// r#""hello {}", dep::bar()"#, +/// &[] +/// ); +/// ``` +pub fn main_file(println: &str, 
externed_deps: &[&str]) -> String { let mut buf = String::new(); - for dep in deps.iter() { + for dep in externed_deps.iter() { buf.push_str(&format!("extern crate {};\n", dep)); } @@ -507,6 +612,7 @@ pub fn main_file(println: &str, deps: &[&str]) -> String { buf } +/// Path to the cargo binary pub fn cargo_exe() -> PathBuf { snapbox::cmd::cargo_bin("cargo") } @@ -518,21 +624,29 @@ pub fn cargo_exe() -> PathBuf { /// does not have access to the raw `ExitStatus` because `ProcessError` needs /// to be serializable (for the Rustc cache), and `ExitStatus` does not /// provide a constructor. -pub struct RawOutput { - pub code: Option, - pub stdout: Vec, - pub stderr: Vec, +struct RawOutput { + #[allow(dead_code)] + code: Option, + stdout: Vec, + #[allow(dead_code)] + stderr: Vec, } +/// Run and verify a [`ProcessBuilder`] +/// +/// Construct with +/// - [`execs`] +/// - [`cargo_process`] +/// - [`Project`] methods #[must_use] #[derive(Clone)] pub struct Execs { ran: bool, process_builder: Option, - expect_stdout: Option, expect_stdin: Option, - expect_stderr: Option, expect_exit_code: Option, + expect_stdout_data: Option, + expect_stderr_data: Option, expect_stdout_contains: Vec, expect_stderr_contains: Vec, expect_stdout_contains_n: Vec<(String, usize)>, @@ -541,9 +655,8 @@ pub struct Execs { expect_stdout_unordered: Vec, expect_stderr_unordered: Vec, expect_stderr_with_without: Vec<(Vec, Vec)>, - expect_json: Option, - expect_json_contains_unordered: Option, stream_output: bool, + assert: snapbox::Assert, } impl Execs { @@ -551,18 +664,23 @@ impl Execs { self.process_builder = Some(p); self } +} +/// # Configure assertions +impl Execs { /// Verifies that stdout is equal to the given lines. - /// See [`compare`] for supported patterns. - pub fn with_stdout(&mut self, expected: S) -> &mut Self { - self.expect_stdout = Some(expected.to_string()); + /// + /// See [`compare::assert_e2e`] for assertion details. 
+ pub fn with_stdout_data(&mut self, expected: impl snapbox::IntoData) -> &mut Self { + self.expect_stdout_data = Some(expected.into_data()); self } /// Verifies that stderr is equal to the given lines. - /// See [`compare`] for supported patterns. - pub fn with_stderr(&mut self, expected: S) -> &mut Self { - self.expect_stderr = Some(expected.to_string()); + /// + /// See [`compare::assert_e2e`] for assertion details. + pub fn with_stderr_data(&mut self, expected: impl snapbox::IntoData) -> &mut Self { + self.expect_stderr_data = Some(expected.into_data()); self } @@ -592,6 +710,7 @@ impl Execs { /// its output. /// /// See [`compare`] for supported patterns. + #[deprecated(note = "replaced with `Execs::with_stdout_data(expected)`")] pub fn with_stdout_contains(&mut self, expected: S) -> &mut Self { self.expect_stdout_contains.push(expected.to_string()); self @@ -601,26 +720,18 @@ impl Execs { /// its output. /// /// See [`compare`] for supported patterns. + #[deprecated(note = "replaced with `Execs::with_stderr_data(expected)`")] pub fn with_stderr_contains(&mut self, expected: S) -> &mut Self { self.expect_stderr_contains.push(expected.to_string()); self } - /// Verifies that stdout contains the given contiguous lines somewhere in - /// its output, and should be repeated `number` times. - /// - /// See [`compare`] for supported patterns. - pub fn with_stdout_contains_n(&mut self, expected: S, number: usize) -> &mut Self { - self.expect_stdout_contains_n - .push((expected.to_string(), number)); - self - } - /// Verifies that stdout does not contain the given contiguous lines. /// /// See [`compare`] for supported patterns. /// /// See note on [`Self::with_stderr_does_not_contain`]. + #[deprecated] pub fn with_stdout_does_not_contain(&mut self, expected: S) -> &mut Self { self.expect_stdout_not_contains.push(expected.to_string()); self @@ -635,6 +746,7 @@ impl Execs { /// your test will pass without verifying the correct behavior. 
If /// possible, write the test first so that it fails, and then implement /// your fix/feature to make it pass. + #[deprecated] pub fn with_stderr_does_not_contain(&mut self, expected: S) -> &mut Self { self.expect_stderr_not_contains.push(expected.to_string()); self @@ -644,6 +756,7 @@ impl Execs { /// ignoring the order of the lines. /// /// See [`Execs::with_stderr_unordered`] for more details. + #[deprecated(note = "replaced with `Execs::with_stdout_data(expected.unordered())`")] pub fn with_stdout_unordered(&mut self, expected: S) -> &mut Self { self.expect_stdout_unordered.push(expected.to_string()); self @@ -670,6 +783,7 @@ impl Execs { /// /// This will randomly fail if the other crate name is `bar`, and the /// order changes. + #[deprecated(note = "replaced with `Execs::with_stderr_data(expected.unordered())`")] pub fn with_stderr_unordered(&mut self, expected: S) -> &mut Self { self.expect_stderr_unordered.push(expected.to_string()); self @@ -697,6 +811,7 @@ impl Execs { /// /// Be careful writing the `without` fragments, see note in /// `with_stderr_does_not_contain`. + #[deprecated] pub fn with_stderr_line_without( &mut self, with: &[S], @@ -707,53 +822,10 @@ impl Execs { self.expect_stderr_with_without.push((with, without)); self } +} - /// Verifies the JSON output matches the given JSON. - /// - /// This is typically used when testing cargo commands that emit JSON. - /// Each separate JSON object should be separated by a blank line. - /// Example: - /// - /// ```rust,ignore - /// assert_that( - /// p.cargo("metadata"), - /// execs().with_json(r#" - /// {"example": "abc"} - /// - /// {"example": "def"} - /// "#) - /// ); - /// ``` - /// - /// - Objects should match in the order given. - /// - The order of arrays is ignored. - /// - Strings support patterns described in [`compare`]. - /// - Use `"{...}"` to match any object. 
- pub fn with_json(&mut self, expected: &str) -> &mut Self { - self.expect_json = Some(expected.to_string()); - self - } - - /// Verifies JSON output contains the given objects (in any order) somewhere - /// in its output. - /// - /// CAUTION: Be very careful when using this. Make sure every object is - /// unique (not a subset of one another). Also avoid using objects that - /// could possibly match multiple output lines unless you're very sure of - /// what you are doing. - /// - /// See `with_json` for more detail. - pub fn with_json_contains_unordered(&mut self, expected: &str) -> &mut Self { - match &mut self.expect_json_contains_unordered { - None => self.expect_json_contains_unordered = Some(expected.to_string()), - Some(e) => { - e.push_str("\n\n"); - e.push_str(expected); - } - } - self - } - +/// # Configure the process +impl Execs { /// Forward subordinate process stdout/stderr to the terminal. /// Useful for printf debugging of the tests. /// CAUTION: CI will fail if you leave this in your test! @@ -807,20 +879,6 @@ impl Execs { self } - pub fn exec_with_output(&mut self) -> Result { - self.ran = true; - // TODO avoid unwrap - let p = (&self.process_builder).clone().unwrap(); - p.exec_with_output() - } - - pub fn build_command(&mut self) -> Command { - self.ran = true; - // TODO avoid unwrap - let p = (&self.process_builder).clone().unwrap(); - p.build_command() - } - /// Enables nightly features for testing /// /// The list of reasons should be why nightly cargo is needed. 
If it is @@ -844,6 +902,17 @@ impl Execs { self } + pub fn overlay_registry(&mut self, url: &Url, path: &str) -> &mut Self { + if let Some(ref mut p) = self.process_builder { + let env_value = format!("{}={}", url, path); + p.env( + "__CARGO_TEST_DEPENDENCY_CONFUSION_VULNERABILITY_DO_NOT_USE_THIS", + env_value, + ); + } + self + } + pub fn enable_split_debuginfo_packed(&mut self) -> &mut Self { self.env("CARGO_PROFILE_DEV_SPLIT_DEBUGINFO", "packed") .env("CARGO_PROFILE_TEST_SPLIT_DEBUGINFO", "packed") @@ -858,6 +927,23 @@ impl Execs { } self } +} + +/// # Run and verify the process +impl Execs { + pub fn exec_with_output(&mut self) -> Result { + self.ran = true; + // TODO avoid unwrap + let p = (&self.process_builder).clone().unwrap(); + p.exec_with_output() + } + + pub fn build_command(&mut self) -> Command { + self.ran = true; + // TODO avoid unwrap + let p = (&self.process_builder).clone().unwrap(); + p.build_command() + } #[track_caller] pub fn run(&mut self) { @@ -871,15 +957,6 @@ impl Execs { } } - #[track_caller] - pub fn run_expect_error(&mut self) { - self.ran = true; - let p = (&self.process_builder).clone().unwrap(); - if self.match_process(&p).is_ok() { - panic!("test was expected to fail, but succeeded running {}", p); - } - } - /// Runs the process, checks the expected output, and returns the first /// JSON object on stdout. 
#[track_caller] @@ -907,11 +984,12 @@ impl Execs { } } + #[track_caller] fn verify_checks_output(&self, stdout: &[u8], stderr: &[u8]) { if self.expect_exit_code.unwrap_or(0) != 0 - && self.expect_stdout.is_none() && self.expect_stdin.is_none() - && self.expect_stderr.is_none() + && self.expect_stdout_data.is_none() + && self.expect_stderr_data.is_none() && self.expect_stdout_contains.is_empty() && self.expect_stderr_contains.is_empty() && self.expect_stdout_contains_n.is_empty() @@ -920,8 +998,6 @@ impl Execs { && self.expect_stdout_unordered.is_empty() && self.expect_stderr_unordered.is_empty() && self.expect_stderr_with_without.is_empty() - && self.expect_json.is_none() - && self.expect_json_contains_unordered.is_none() { panic!( "`with_status()` is used, but no output is checked.\n\ @@ -933,6 +1009,7 @@ impl Execs { } } + #[track_caller] fn match_process(&self, process: &ProcessBuilder) -> Result { println!("running {}", process); let res = if self.stream_output { @@ -983,6 +1060,7 @@ impl Execs { } } + #[track_caller] fn match_output(&self, code: Option, stdout: &[u8], stderr: &[u8]) -> Result<()> { self.verify_checks_output(stdout, stderr); let stdout = std::str::from_utf8(stdout).expect("stdout is not utf8"); @@ -1001,11 +1079,23 @@ impl Execs { ), } - if let Some(expect_stdout) = &self.expect_stdout { - compare::match_exact(expect_stdout, stdout, "stdout", stderr, cwd)?; + if let Some(expect_stdout_data) = &self.expect_stdout_data { + if let Err(err) = self.assert.try_eq( + Some(&"stdout"), + stdout.into_data(), + expect_stdout_data.clone(), + ) { + panic!("{err}") + } } - if let Some(expect_stderr) = &self.expect_stderr { - compare::match_exact(expect_stderr, stderr, "stderr", stdout, cwd)?; + if let Some(expect_stderr_data) = &self.expect_stderr_data { + if let Err(err) = self.assert.try_eq( + Some(&"stderr"), + stderr.into_data(), + expect_stderr_data.clone(), + ) { + panic!("{err}") + } } for expect in self.expect_stdout_contains.iter() { 
compare::match_contains(expect, stdout, cwd)?; @@ -1031,14 +1121,6 @@ impl Execs { for (with, without) in self.expect_stderr_with_without.iter() { compare::match_with_without(stderr, with, without, cwd)?; } - - if let Some(ref expect_json) = self.expect_json { - compare::match_json(expect_json, stdout, cwd)?; - } - - if let Some(ref expected) = self.expect_json_contains_unordered { - compare::match_json_contains_unordered(expected, stdout, cwd)?; - } Ok(()) } } @@ -1051,14 +1133,15 @@ impl Drop for Execs { } } +/// Run and verify a process, see [`Execs`] pub fn execs() -> Execs { Execs { ran: false, process_builder: None, - expect_stdout: None, - expect_stderr: None, expect_stdin: None, expect_exit_code: Some(0), + expect_stdout_data: None, + expect_stderr_data: None, expect_stdout_contains: Vec::new(), expect_stderr_contains: Vec::new(), expect_stdout_contains_n: Vec::new(), @@ -1067,12 +1150,12 @@ pub fn execs() -> Execs { expect_stdout_unordered: Vec::new(), expect_stderr_unordered: Vec::new(), expect_stderr_with_without: Vec::new(), - expect_json: None, - expect_json_contains_unordered: None, stream_output: false, + assert: compare::assert_e2e(), } } +/// Generate a basic `Cargo.toml` pub fn basic_manifest(name: &str, version: &str) -> String { format!( r#" @@ -1086,6 +1169,7 @@ pub fn basic_manifest(name: &str, version: &str) -> String { ) } +/// Generate a `Cargo.toml` with the specified `bin.name` pub fn basic_bin_manifest(name: &str) -> String { format!( r#" @@ -1104,6 +1188,7 @@ pub fn basic_bin_manifest(name: &str) -> String { ) } +/// Generate a `Cargo.toml` with the specified `lib.name` pub fn basic_lib_manifest(name: &str) -> String { format!( r#" @@ -1122,10 +1207,6 @@ pub fn basic_lib_manifest(name: &str) -> String { ) } -pub fn path2url>(p: P) -> Url { - Url::from_file_path(p).ok().unwrap() -} - struct RustcInfo { verbose_version: String, host: String, @@ -1176,8 +1257,13 @@ pub fn is_nightly() -> bool { && (vv.contains("-nightly") || 
vv.contains("-dev")) } -pub fn process>(t: T) -> ProcessBuilder { - _process(t.as_ref()) +/// Run `$bin` in the test's environment, see [`ProcessBuilder`] +/// +/// For more on the test environment, see +/// - [`paths::root`] +/// - [`TestEnvCommandExt`] +pub fn process>(bin: T) -> ProcessBuilder { + _process(bin.as_ref()) } fn _process(t: &OsStr) -> ProcessBuilder { @@ -1187,27 +1273,27 @@ fn _process(t: &OsStr) -> ProcessBuilder { } /// Enable nightly features for testing -pub trait ChannelChanger { +pub trait ChannelChangerCommandExt { /// The list of reasons should be why nightly cargo is needed. If it is /// because of an unstable feature put the name of the feature as the reason, /// e.g. `&["print-im-a-teapot"]`. fn masquerade_as_nightly_cargo(self, _reasons: &[&str]) -> Self; } -impl ChannelChanger for &mut ProcessBuilder { +impl ChannelChangerCommandExt for &mut ProcessBuilder { fn masquerade_as_nightly_cargo(self, _reasons: &[&str]) -> Self { self.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly") } } -impl ChannelChanger for snapbox::cmd::Command { +impl ChannelChangerCommandExt for snapbox::cmd::Command { fn masquerade_as_nightly_cargo(self, _reasons: &[&str]) -> Self { self.env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly") } } /// Establish a process's test environment -pub trait TestEnv: Sized { +pub trait TestEnvCommandExt: Sized { fn test_env(mut self) -> Self { // In general just clear out all cargo-specific configuration already in the // environment. 
Our tests all assume a "default configuration" unless @@ -1247,7 +1333,7 @@ pub trait TestEnv: Sized { self = self .current_dir(&paths::root()) .env("HOME", paths::home()) - .env("CARGO_HOME", paths::home().join(".cargo")) + .env("CARGO_HOME", paths::cargo_home()) .env("__CARGO_TEST_ROOT", paths::global_root()) // Force Cargo to think it's on the stable channel for all tests, this // should hopefully not surprise us as we add cargo features over time and @@ -1277,6 +1363,7 @@ pub trait TestEnv: Sized { .env_remove("MFLAGS") .env_remove("MSYSTEM") // assume cmd.exe everywhere on windows .env_remove("RUSTC") + .env_remove("RUST_BACKTRACE") .env_remove("RUSTC_WORKSPACE_WRAPPER") .env_remove("RUSTC_WRAPPER") .env_remove("RUSTDOC") @@ -1286,10 +1373,6 @@ pub trait TestEnv: Sized { .env_remove("USER") // not set on some rust-lang docker images .env_remove("XDG_CONFIG_HOME") // see #2345 .env_remove("OUT_DIR"); // see #13204 - if cfg!(target_os = "macos") { - // Work-around a bug in macOS 10.15, see `link_or_copy` for details. 
- self = self.env("__CARGO_COPY_DONT_LINK_DO_NOT_USE_THIS", "1"); - } if cfg!(windows) { self = self.env("USERPROFILE", paths::home()); } @@ -1301,7 +1384,7 @@ pub trait TestEnv: Sized { fn env_remove(self, key: &str) -> Self; } -impl TestEnv for &mut ProcessBuilder { +impl TestEnvCommandExt for &mut ProcessBuilder { fn current_dir>(self, path: S) -> Self { let path = path.as_ref(); self.cwd(path) @@ -1314,7 +1397,7 @@ impl TestEnv for &mut ProcessBuilder { } } -impl TestEnv for snapbox::cmd::Command { +impl TestEnvCommandExt for snapbox::cmd::Command { fn current_dir>(self, path: S) -> Self { self.current_dir(path) } @@ -1327,11 +1410,11 @@ impl TestEnv for snapbox::cmd::Command { } /// Test the cargo command -pub trait CargoCommand { +pub trait CargoCommandExt { fn cargo_ui() -> Self; } -impl CargoCommand for snapbox::cmd::Command { +impl CargoCommandExt for snapbox::cmd::Command { fn cargo_ui() -> Self { Self::new(cargo_exe()) .with_assert(compare::assert_ui()) @@ -1341,7 +1424,7 @@ impl CargoCommand for snapbox::cmd::Command { } /// Add a list of arguments as a line -pub trait ArgLine: Sized { +pub trait ArgLineCommandExt: Sized { fn arg_line(mut self, s: &str) -> Self { for mut arg in s.split_whitespace() { if (arg.starts_with('"') && arg.ends_with('"')) @@ -1359,29 +1442,31 @@ pub trait ArgLine: Sized { fn arg>(self, s: S) -> Self; } -impl ArgLine for &mut ProcessBuilder { +impl ArgLineCommandExt for &mut ProcessBuilder { fn arg>(self, s: S) -> Self { self.arg(s) } } -impl ArgLine for snapbox::cmd::Command { +impl ArgLineCommandExt for snapbox::cmd::Command { fn arg>(self, s: S) -> Self { self.arg(s) } } -pub fn cargo_process(s: &str) -> Execs { +/// Run `cargo $arg_line`, see [`Execs`] +pub fn cargo_process(arg_line: &str) -> Execs { let cargo = cargo_exe(); let mut p = process(&cargo); p.env("CARGO", cargo); - p.arg_line(s); + p.arg_line(arg_line); execs().with_process_builder(p) } -pub fn git_process(s: &str) -> ProcessBuilder { +/// Run `git $arg_line`, 
see [`ProcessBuilder`] +pub fn git_process(arg_line: &str) -> ProcessBuilder { let mut p = process("git"); - p.arg_line(s); + p.arg_line(arg_line); p } @@ -1497,3 +1582,82 @@ where let thread = std::thread::spawn(|| f()); thread_wait_timeout(n, thread) } + +// Helper for testing dep-info files in the fingerprint dir. +#[track_caller] +pub fn assert_deps(project: &Project, fingerprint: &str, test_cb: impl Fn(&Path, &[(u8, &str)])) { + let mut files = project + .glob(fingerprint) + .map(|f| f.expect("unwrap glob result")) + // Filter out `.json` entries. + .filter(|f| f.extension().is_none()); + let info_path = files + .next() + .unwrap_or_else(|| panic!("expected 1 dep-info file at {}, found 0", fingerprint)); + assert!(files.next().is_none(), "expected only 1 dep-info file"); + let dep_info = fs::read(&info_path).unwrap(); + let dep_info = &mut &dep_info[..]; + let deps = (0..read_usize(dep_info)) + .map(|_| { + let ty = read_u8(dep_info); + let path = std::str::from_utf8(read_bytes(dep_info)).unwrap(); + let checksum_present = read_bool(dep_info); + if checksum_present { + // Read out the checksum info without using it + let _file_len = read_u64(dep_info); + let _checksum = read_bytes(dep_info); + } + (ty, path) + }) + .collect::>(); + test_cb(&info_path, &deps); + + fn read_usize(bytes: &mut &[u8]) -> usize { + let ret = &bytes[..4]; + *bytes = &bytes[4..]; + + u32::from_le_bytes(ret.try_into().unwrap()) as usize + } + + fn read_u8(bytes: &mut &[u8]) -> u8 { + let ret = bytes[0]; + *bytes = &bytes[1..]; + ret + } + + fn read_bool(bytes: &mut &[u8]) -> bool { + read_u8(bytes) != 0 + } + + fn read_u64(bytes: &mut &[u8]) -> u64 { + let ret = &bytes[..8]; + *bytes = &bytes[8..]; + + u64::from_le_bytes(ret.try_into().unwrap()) + } + + fn read_bytes<'a>(bytes: &mut &'a [u8]) -> &'a [u8] { + let n = read_usize(bytes); + let ret = &bytes[..n]; + *bytes = &bytes[n..]; + ret + } +} + +pub fn assert_deps_contains(project: &Project, fingerprint: &str, expected: &[(u8, 
&str)]) { + assert_deps(project, fingerprint, |info_path, entries| { + for (e_kind, e_path) in expected { + let pattern = glob::Pattern::new(e_path).unwrap(); + let count = entries + .iter() + .filter(|(kind, path)| kind == e_kind && pattern.matches(path)) + .count(); + if count != 1 { + panic!( + "Expected 1 match of {} {} in {:?}, got {}:\n{:#?}", + e_kind, e_path, info_path, count, entries + ); + } + } + }) +} diff --git a/crates/cargo-test-support/src/paths.rs b/crates/cargo-test-support/src/paths.rs index 806810b87a8..e9d51502aad 100644 --- a/crates/cargo-test-support/src/paths.rs +++ b/crates/cargo-test-support/src/paths.rs @@ -1,3 +1,5 @@ +//! Access common paths and manipulate the filesystem + use filetime::FileTime; use std::cell::RefCell; @@ -41,6 +43,9 @@ fn set_global_root(tmp_dir: Option<&'static str>) { } } +/// Path to the parent directory of all test [`root`]s +/// +/// ex: `$CARGO_TARGET_TMPDIR/cit` pub fn global_root() -> PathBuf { let lock = GLOBAL_ROOT .get_or_init(|| Default::default()) @@ -64,10 +69,12 @@ thread_local! 
{ static TEST_ID: RefCell> = RefCell::new(None); } +/// See [`init_root`] pub struct TestIdGuard { _private: (), } +/// For test harnesses like [`crate::cargo_test`] pub fn init_root(tmp_dir: Option<&'static str>) -> TestIdGuard { static NEXT_ID: AtomicUsize = AtomicUsize::new(0); @@ -90,6 +97,9 @@ impl Drop for TestIdGuard { } } +/// Path to the test's filesystem scratchpad +/// +/// ex: `$CARGO_TARGET_TMPDIR/cit/t0` pub fn root() -> PathBuf { let id = TEST_ID.with(|n| { n.borrow().expect( @@ -103,6 +113,9 @@ pub fn root() -> PathBuf { root } +/// Path to the current test's `$HOME` +/// +/// ex: `$CARGO_TARGET_TMPDIR/cit/t0/home` pub fn home() -> PathBuf { let mut path = root(); path.push("home"); @@ -110,7 +123,17 @@ pub fn home() -> PathBuf { path } +/// Path to the current test's `$CARGO_HOME` +/// +/// ex: `$CARGO_TARGET_TMPDIR/cit/t0/home/.cargo` +pub fn cargo_home() -> PathBuf { + home().join(".cargo") +} + +/// Common path and file operations pub trait CargoPathExt { + fn to_url(&self) -> url::Url; + fn rm_rf(&self); fn mkdir_p(&self); @@ -132,6 +155,10 @@ pub trait CargoPathExt { } impl CargoPathExt for Path { + fn to_url(&self) -> url::Url { + url::Url::from_file_path(self).ok().unwrap() + } + fn rm_rf(&self) { let meta = match self.symlink_metadata() { Ok(meta) => meta, @@ -211,6 +238,30 @@ impl CargoPathExt for Path { } } +impl CargoPathExt for PathBuf { + fn to_url(&self) -> url::Url { + self.as_path().to_url() + } + + fn rm_rf(&self) { + self.as_path().rm_rf() + } + fn mkdir_p(&self) { + self.as_path().mkdir_p() + } + + fn ls_r(&self) -> Vec { + self.as_path().ls_r() + } + + fn move_in_time(&self, travel_amount: F) + where + F: Fn(i64, u32) -> (i64, u32), + { + self.as_path().move_in_time(travel_amount) + } +} + fn do_op(path: &Path, desc: &str, mut f: F) where F: FnMut(&Path) -> io::Result<()>, @@ -241,18 +292,29 @@ where /// Get the filename for a library. 
/// -/// `kind` should be one of: "lib", "rlib", "staticlib", "dylib", "proc-macro" +/// `kind` should be one of: +/// - `lib` +/// - `rlib` +/// - `staticlib` +/// - `dylib` +/// - `proc-macro` /// -/// For example, dynamic library named "foo" would return: -/// - macOS: "libfoo.dylib" -/// - Windows: "foo.dll" -/// - Unix: "libfoo.so" +/// # Examples +/// ``` +/// # use cargo_test_support::paths::get_lib_filename; +/// get_lib_filename("foo", "dylib"); +/// ``` +/// would return: +/// - macOS: `"libfoo.dylib"` +/// - Windows: `"foo.dll"` +/// - Unix: `"libfoo.so"` pub fn get_lib_filename(name: &str, kind: &str) -> String { let prefix = get_lib_prefix(kind); let extension = get_lib_extension(kind); format!("{}{}.{}", prefix, name, extension) } +/// See [`get_lib_filename`] for more details pub fn get_lib_prefix(kind: &str) -> &str { match kind { "lib" | "rlib" => "lib", @@ -267,6 +329,7 @@ pub fn get_lib_prefix(kind: &str) -> &str { } } +/// See [`get_lib_filename`] for more details pub fn get_lib_extension(kind: &str) -> &str { match kind { "lib" | "rlib" => "rlib", @@ -290,7 +353,7 @@ pub fn get_lib_extension(kind: &str) -> &str { } } -/// Returns the sysroot as queried from rustc. +/// Path to `rustc`s sysroot pub fn sysroot() -> String { let output = Command::new("rustc") .arg("--print=sysroot") diff --git a/crates/cargo-test-support/src/publish.rs b/crates/cargo-test-support/src/publish.rs index f850330c191..b47c73b3808 100644 --- a/crates/cargo-test-support/src/publish.rs +++ b/crates/cargo-test-support/src/publish.rs @@ -1,6 +1,66 @@ -use crate::compare::{assert_match_exact, find_json_mismatch}; +//! Helpers for testing `cargo package` / `cargo publish` +//! +//! # Example +//! +//! ```no_run +//! # use cargo_test_support::registry::RegistryBuilder; +//! # use cargo_test_support::publish::validate_upload; +//! # use cargo_test_support::project; +//! // This replaces `registry::init()` and must be called before `Package::new().publish()` +//! 
let registry = RegistryBuilder::new().http_api().http_index().build(); +//! +//! let p = project() +//! .file( +//! "Cargo.toml", +//! r#" +//! [package] +//! name = "foo" +//! version = "0.0.1" +//! edition = "2015" +//! authors = [] +//! license = "MIT" +//! description = "foo" +//! "#, +//! ) +//! .file("src/main.rs", "fn main() {}") +//! .build(); +//! +//! p.cargo("publish --no-verify") +//! .replace_crates_io(registry.index_url()) +//! .run(); +//! +//! validate_upload( +//! r#" +//! { +//! "authors": [], +//! "badges": {}, +//! "categories": [], +//! "deps": [], +//! "description": "foo", +//! "documentation": null, +//! "features": {}, +//! "homepage": null, +//! "keywords": [], +//! "license": "MIT", +//! "license_file": null, +//! "links": null, +//! "name": "foo", +//! "readme": null, +//! "readme_file": null, +//! "repository": null, +//! "rust_version": null, +//! "vers": "0.0.1" +//! } +//! "#, +//! "foo-0.0.1.crate", +//! &["Cargo.lock", "Cargo.toml", "Cargo.toml.orig", "src/main.rs"], +//! ); +//! ``` + +use crate::compare::assert_match_exact; use crate::registry::{self, alt_api_path, FeatureMap}; use flate2::read::GzDecoder; +use snapbox::prelude::*; use std::collections::{HashMap, HashSet}; use std::fs; use std::fs::File; @@ -17,7 +77,7 @@ where Ok(u32::from_le_bytes(buf)) } -/// Checks the result of a crate publish. +/// Check the `cargo publish` API call pub fn validate_upload(expected_json: &str, expected_crate_name: &str, expected_files: &[&str]) { let new_path = registry::api_path().join("api/v1/crates/new"); _validate_upload( @@ -29,7 +89,7 @@ pub fn validate_upload(expected_json: &str, expected_crate_name: &str, expected_ ); } -/// Checks the result of a crate publish, along with the contents of the files. 
+/// Check the `cargo publish` API call, with file contents pub fn validate_upload_with_contents( expected_json: &str, expected_crate_name: &str, @@ -46,7 +106,7 @@ pub fn validate_upload_with_contents( ); } -/// Checks the result of a crate publish to an alternative registry. +/// Check the `cargo publish` API call to the alternative test registry pub fn validate_alt_upload( expected_json: &str, expected_crate_name: &str, @@ -74,12 +134,8 @@ fn _validate_upload( let json_sz = read_le_u32(&mut f).expect("read json length"); let mut json_bytes = vec![0; json_sz as usize]; f.read_exact(&mut json_bytes).expect("read JSON data"); - let actual_json = serde_json::from_slice(&json_bytes).expect("uploaded JSON should be valid"); - let expected_json = serde_json::from_str(expected_json).expect("expected JSON does not parse"); - if let Err(e) = find_json_mismatch(&expected_json, &actual_json, None) { - panic!("{}", e); - } + snapbox::assert_data_eq!(json_bytes, expected_json.is_json()); // 32-bit little-endian integer of length of crate file. let crate_sz = read_le_u32(&mut f).expect("read crate length"); diff --git a/crates/cargo-test-support/src/registry.rs b/crates/cargo-test-support/src/registry.rs index f8b4b144782..9e0bc9e3ed9 100644 --- a/crates/cargo-test-support/src/registry.rs +++ b/crates/cargo-test-support/src/registry.rs @@ -1,3 +1,47 @@ +//! Interact with the [`TestRegistry`] +//! +//! # Example +//! +//! ```no_run +//! use cargo_test_support::registry::Package; +//! use cargo_test_support::project; +//! use cargo_test_support::str; +//! +//! // Publish package "a" depending on "b". +//! Package::new("a", "1.0.0") +//! .dep("b", "1.0.0") +//! .file("src/lib.rs", r#" +//! extern crate b; +//! pub fn f() -> i32 { b::f() * 2 } +//! "#) +//! .publish(); +//! +//! // Publish package "b". +//! Package::new("b", "1.0.0") +//! .file("src/lib.rs", r#" +//! pub fn f() -> i32 { 12 } +//! "#) +//! .publish(); +//! +//! // Create a project that uses package "a". +//! 
let p = project() +//! .file("Cargo.toml", r#" +//! [package] +//! name = "foo" +//! version = "0.0.1" +//! +//! [dependencies] +//! a = "1.0" +//! "#) +//! .file("src/main.rs", r#" +//! extern crate a; +//! fn main() { println!("{}", a::f()); } +//! "#) +//! .build(); +//! +//! p.cargo("run").with_stdout_data(str!["24"]).run(); +//! ``` + use crate::git::repo; use crate::paths; use crate::publish::{create_index_line, write_to_index}; @@ -20,39 +64,64 @@ use time::format_description::well_known::Rfc3339; use time::{Duration, OffsetDateTime}; use url::Url; -/// Gets the path to the local index pretending to be crates.io. This is a Git repo +/// Path to the local index for pseudo-crates.io. +/// +/// This is a Git repo /// initialized with a `config.json` file pointing to `dl_path` for downloads /// and `api_path` for uploads. +/// +/// ex: `$CARGO_TARGET_TMPDIR/cit/t0/registry` pub fn registry_path() -> PathBuf { generate_path("registry") } -/// Gets the path for local web API uploads. Cargo will place the contents of a web API + +/// Path to the local web API uploads +/// +/// Cargo will place the contents of a web API /// request here. For example, `api/v1/crates/new` is the result of publishing a crate. +/// +/// ex: `$CARGO_TARGET_TMPDIR/cit/t0/api` pub fn api_path() -> PathBuf { generate_path("api") } -/// Gets the path where crates can be downloaded using the web API endpoint. Crates + +/// Path to download `.crate` files using the web API endpoint. +/// +/// Crates /// should be organized as `{name}/{version}/download` to match the web API /// endpoint. This is rarely used and must be manually set up. -fn dl_path() -> PathBuf { +/// +/// ex: `$CARGO_TARGET_TMPDIR/cit/t0/dl` +pub fn dl_path() -> PathBuf { generate_path("dl") } -/// Gets the alternative-registry version of `registry_path`. 
-fn alt_registry_path() -> PathBuf { + +/// Path to the alternative-registry version of [`registry_path`] +/// +/// ex: `$CARGO_TARGET_TMPDIR/cit/t0/alternative-registry` +pub fn alt_registry_path() -> PathBuf { generate_path("alternative-registry") } -/// Gets the alternative-registry version of `registry_url`. + +/// URL to the alternative-registry version of `registry_url` fn alt_registry_url() -> Url { generate_url("alternative-registry") } -/// Gets the alternative-registry version of `dl_path`. + +/// Path to the alternative-registry version of [`dl_path`] +/// +/// ex: `$CARGO_TARGET_TMPDIR/cit/t0/alternative-dl` pub fn alt_dl_path() -> PathBuf { generate_path("alternative-dl") } -/// Gets the alternative-registry version of `api_path`. + +/// Path to the alternative-registry version of [`api_path`] +/// +/// ex: `$CARGO_TARGET_TMPDIR/cit/t0/alternative-api` pub fn alt_api_path() -> PathBuf { generate_path("alternative-api") } + fn generate_path(name: &str) -> PathBuf { paths::root().join(name) } @@ -60,6 +129,7 @@ fn generate_url(name: &str) -> Url { Url::from_file_path(generate_path(name)).ok().unwrap() } +/// Auth-token for publishing, see [`RegistryBuilder::token`] #[derive(Clone)] pub enum Token { Plaintext(String), @@ -68,6 +138,7 @@ pub enum Token { impl Token { /// This is a valid PASETO secret key. + /// /// This one is already publicly available as part of the text of the RFC so is safe to use for tests. pub fn rfc_key() -> Token { Token::Keys( @@ -80,7 +151,9 @@ impl Token { type RequestCallback = Box Response>; -/// A builder for initializing registries. +/// Prepare a local [`TestRegistry`] fixture +/// +/// See also [`init`] and [`alt_init`] pub struct RegistryBuilder { /// If set, configures an alternate registry with the given name. 
alternative: Option, @@ -108,6 +181,9 @@ pub struct RegistryBuilder { credential_provider: Option, } +/// A local registry fixture +/// +/// Most tests won't need to call this directly but instead interact with [`Package`] pub struct TestRegistry { server: Option, index_url: Url, @@ -279,7 +355,7 @@ impl RegistryBuilder { /// Initializes the registry. #[must_use] pub fn build(self) -> TestRegistry { - let config_path = paths::home().join(".cargo/config.toml"); + let config_path = paths::cargo_home().join("config.toml"); t!(fs::create_dir_all(config_path.parent().unwrap())); let prefix = if let Some(alternative) = &self.alternative { format!("{alternative}-") @@ -391,7 +467,7 @@ impl RegistryBuilder { } if self.configure_token { - let credentials = paths::home().join(".cargo/credentials.toml"); + let credentials = paths::cargo_home().join("credentials.toml"); match ®istry.token { Token::Plaintext(token) => { if let Some(alternative) = &self.alternative { @@ -459,71 +535,31 @@ impl RegistryBuilder { } } -/// A builder for creating a new package in a registry. +/// Published package builder for [`TestRegistry`] /// /// This uses "source replacement" using an automatically generated /// `.cargo/config` file to ensure that dependencies will use these packages /// instead of contacting crates.io. See `source-replacement.md` for more /// details on how source replacement works. /// -/// Call `publish` to finalize and create the package. +/// Call [`Package::publish`] to finalize and create the package. /// /// If no files are specified, an empty `lib.rs` file is automatically created. /// /// The `Cargo.toml` file is automatically generated based on the methods -/// called on `Package` (for example, calling `dep()` will add to the +/// called on `Package` (for example, calling [`Package::dep()`] will add to the /// `[dependencies]` automatically). You may also specify a `Cargo.toml` file /// to override the generated one. 
/// /// This supports different registry types: /// - Regular source replacement that replaces `crates.io` (the default). /// - A "local registry" which is a subset for vendoring (see -/// `Package::local`). +/// [`Package::local`]). /// - An "alternative registry" which requires specifying the registry name -/// (see `Package::alternative`). +/// (see [`Package::alternative`]). /// /// This does not support "directory sources". See `directory.rs` for /// `VendorPackage` which implements directory sources. -/// -/// # Example -/// ```no_run -/// use cargo_test_support::registry::Package; -/// use cargo_test_support::project; -/// -/// // Publish package "a" depending on "b". -/// Package::new("a", "1.0.0") -/// .dep("b", "1.0.0") -/// .file("src/lib.rs", r#" -/// extern crate b; -/// pub fn f() -> i32 { b::f() * 2 } -/// "#) -/// .publish(); -/// -/// // Publish package "b". -/// Package::new("b", "1.0.0") -/// .file("src/lib.rs", r#" -/// pub fn f() -> i32 { 12 } -/// "#) -/// .publish(); -/// -/// // Create a project that uses package "a". -/// let p = project() -/// .file("Cargo.toml", r#" -/// [package] -/// name = "foo" -/// version = "0.0.1" -/// -/// [dependencies] -/// a = "1.0" -/// "#) -/// .file("src/main.rs", r#" -/// extern crate a; -/// fn main() { println!("{}", a::f()); } -/// "#) -/// .build(); -/// -/// p.cargo("run").with_stdout("24").run(); -/// ``` #[must_use] pub struct Package { name: String, @@ -544,6 +580,7 @@ pub struct Package { pub(crate) type FeatureMap = BTreeMap>; +/// Published package dependency builder, see [`Package::add_dep`] #[derive(Clone)] pub struct Dependency { name: String, @@ -582,14 +619,18 @@ struct PackageFile { const DEFAULT_MODE: u32 = 0o644; -/// Initializes the on-disk registry and sets up the config so that crates.io -/// is replaced with the one on disk. +/// Setup a local pseudo-crates.io [`TestRegistry`] +/// +/// This is implicitly called by [`Package::new`]. 
+/// +/// When calling `cargo publish`, see instead [`crate::publish`]. pub fn init() -> TestRegistry { RegistryBuilder::new().build() } -/// Variant of `init` that initializes the "alternative" registry and crates.io -/// replacement. +/// Setup a local "alternative" [`TestRegistry`] +/// +/// When calling `cargo publish`, see instead [`crate::publish`]. pub fn alt_init() -> TestRegistry { init(); RegistryBuilder::new().alternative().build() @@ -1195,7 +1236,7 @@ impl Package { /// Creates a new package builder. /// Call `publish()` to finalize and build the package. pub fn new(name: &str, vers: &str) -> Package { - let config = paths::home().join(".cargo/config.toml"); + let config = paths::cargo_home().join("config.toml"); if !config.exists() { init(); } @@ -1234,6 +1275,8 @@ impl Package { /// See `src/doc/src/reference/registries.md` for more details on /// alternative registries. See `alt_registry.rs` for the tests that use /// this. + /// + /// **Requires:** [`alt_init`] pub fn alternative(&mut self, alternative: bool) -> &mut Package { self.alternative = alternative; self @@ -1470,6 +1513,7 @@ impl Package { t!(fs::create_dir_all(dst.parent().unwrap())); let f = t!(File::create(&dst)); let mut a = Builder::new(GzEncoder::new(f, Compression::none())); + a.sparse(false); if !self .files @@ -1649,7 +1693,12 @@ impl Package { /// Returns the path to the compressed package file. 
pub fn archive_dst(&self) -> PathBuf { if self.local { - registry_path().join(format!("{}-{}.crate", self.name, self.vers)) + let path = if self.alternative { + alt_registry_path() + } else { + registry_path() + }; + path.join(format!("{}-{}.crate", self.name, self.vers)) } else if self.alternative { alt_dl_path() .join(&self.name) @@ -1661,6 +1710,7 @@ impl Package { } } +/// Generate a checksum pub fn cksum(s: &[u8]) -> String { Sha256::new().update(s).finish_hex() } diff --git a/crates/cargo-util-schemas/Cargo.toml b/crates/cargo-util-schemas/Cargo.toml index 1b72fd30bf1..87d133b490a 100644 --- a/crates/cargo-util-schemas/Cargo.toml +++ b/crates/cargo-util-schemas/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "cargo-util-schemas" -version = "0.3.1" -rust-version = "1.77" # MSRV:1 +version = "0.7.0" +rust-version = "1.81" # MSRV:1 edition.workspace = true license.workspace = true homepage.workspace = true diff --git a/crates/cargo-util-schemas/README.md b/crates/cargo-util-schemas/README.md new file mode 100644 index 00000000000..844bc081f70 --- /dev/null +++ b/crates/cargo-util-schemas/README.md @@ -0,0 +1,2 @@ +> This crate is maintained by the Cargo team for use by the wider +> ecosystem. This crate follows semver compatibility for its APIs. 
diff --git a/crates/cargo-util-schemas/src/core/package_id_spec.rs b/crates/cargo-util-schemas/src/core/package_id_spec.rs index 72d72149e2a..674d804ccb8 100644 --- a/crates/cargo-util-schemas/src/core/package_id_spec.rs +++ b/crates/cargo-util-schemas/src/core/package_id_spec.rs @@ -94,13 +94,8 @@ impl PackageIdSpec { .into()); } } - let mut parts = spec.splitn(2, [':', '@']); - let name = parts.next().unwrap(); - let version = match parts.next() { - Some(version) => Some(version.parse::()?), - None => None, - }; - PackageName::new(name)?; + let (name, version) = parse_spec(spec)?.unwrap_or_else(|| (spec.to_owned(), None)); + PackageName::new(&name)?; Ok(PackageIdSpec { name: String::from(name), version, @@ -161,11 +156,8 @@ impl PackageIdSpec { return Err(ErrorKind::MissingUrlPath(url).into()); }; match frag { - Some(fragment) => match fragment.split_once([':', '@']) { - Some((name, part)) => { - let version = part.parse::()?; - (String::from(name), Some(version)) - } + Some(fragment) => match parse_spec(&fragment)? 
{ + Some((name, ver)) => (name, ver), None => { if fragment.chars().next().unwrap().is_alphabetic() { (String::from(fragment.as_str()), None) @@ -217,6 +209,18 @@ impl PackageIdSpec { } } +fn parse_spec(spec: &str) -> Result)>> { + let Some((name, ver)) = spec + .rsplit_once('@') + .or_else(|| spec.rsplit_once(':').filter(|(n, _)| !n.ends_with(':'))) + else { + return Ok(None); + }; + let name = name.to_owned(); + let ver = ver.parse::()?; + Ok(Some((name, Some(ver)))) +} + fn strip_url_protocol(url: &Url) -> Url { // Ridiculous hoop because `Url::set_scheme` errors when changing to http/https let raw = url.to_string(); @@ -323,18 +327,30 @@ mod tests { use crate::core::{GitReference, SourceKind}; use url::Url; + #[track_caller] + fn ok(spec: &str, expected: PackageIdSpec, expected_rendered: &str) { + let parsed = PackageIdSpec::parse(spec).unwrap(); + assert_eq!(parsed, expected); + let rendered = parsed.to_string(); + assert_eq!(rendered, expected_rendered); + let reparsed = PackageIdSpec::parse(&rendered).unwrap(); + assert_eq!(reparsed, expected); + } + + macro_rules! 
err { + ($spec:expr, $expected:pat) => { + let err = PackageIdSpec::parse($spec).unwrap_err(); + let kind = err.0; + assert!( + matches!(kind, $expected), + "`{}` parse error mismatch, got {kind:?}", + $spec + ); + }; + } + #[test] fn good_parsing() { - #[track_caller] - fn ok(spec: &str, expected: PackageIdSpec, expected_rendered: &str) { - let parsed = PackageIdSpec::parse(spec).unwrap(); - assert_eq!(parsed, expected); - let rendered = parsed.to_string(); - assert_eq!(rendered, expected_rendered); - let reparsed = PackageIdSpec::parse(&rendered).unwrap(); - assert_eq!(reparsed, expected); - } - ok( "https://crates.io/foo", PackageIdSpec { @@ -425,6 +441,16 @@ mod tests { }, "foo", ); + ok( + "foo::bar", + PackageIdSpec { + name: String::from("foo::bar"), + version: None, + url: None, + kind: None, + }, + "foo::bar", + ); ok( "foo:1.2.3", PackageIdSpec { @@ -435,6 +461,16 @@ mod tests { }, "foo@1.2.3", ); + ok( + "foo::bar:1.2.3", + PackageIdSpec { + name: String::from("foo::bar"), + version: Some("1.2.3".parse().unwrap()), + url: None, + kind: None, + }, + "foo::bar@1.2.3", + ); ok( "foo@1.2.3", PackageIdSpec { @@ -445,6 +481,16 @@ mod tests { }, "foo@1.2.3", ); + ok( + "foo::bar@1.2.3", + PackageIdSpec { + name: String::from("foo::bar"), + version: Some("1.2.3".parse().unwrap()), + url: None, + kind: None, + }, + "foo::bar@1.2.3", + ); ok( "foo@1.2", PackageIdSpec { @@ -579,6 +625,16 @@ mod tests { }, "file:///path/to/my/project/foo", ); + ok( + "file:///path/to/my/project/foo::bar", + PackageIdSpec { + name: String::from("foo::bar"), + version: None, + url: Some(Url::parse("file:///path/to/my/project/foo::bar").unwrap()), + kind: None, + }, + "file:///path/to/my/project/foo::bar", + ); ok( "file:///path/to/my/project/foo#1.1.8", PackageIdSpec { @@ -599,29 +655,77 @@ mod tests { }, "path+file:///path/to/my/project/foo#1.1.8", ); + ok( + "path+file:///path/to/my/project/foo#bar", + PackageIdSpec { + name: String::from("bar"), + version: None, + url: 
Some(Url::parse("file:///path/to/my/project/foo").unwrap()), + kind: Some(SourceKind::Path), + }, + "path+file:///path/to/my/project/foo#bar", + ); + ok( + "path+file:///path/to/my/project/foo#foo::bar", + PackageIdSpec { + name: String::from("foo::bar"), + version: None, + url: Some(Url::parse("file:///path/to/my/project/foo").unwrap()), + kind: Some(SourceKind::Path), + }, + "path+file:///path/to/my/project/foo#foo::bar", + ); + ok( + "path+file:///path/to/my/project/foo#bar:1.1.8", + PackageIdSpec { + name: String::from("bar"), + version: Some("1.1.8".parse().unwrap()), + url: Some(Url::parse("file:///path/to/my/project/foo").unwrap()), + kind: Some(SourceKind::Path), + }, + "path+file:///path/to/my/project/foo#bar@1.1.8", + ); + ok( + "path+file:///path/to/my/project/foo#foo::bar:1.1.8", + PackageIdSpec { + name: String::from("foo::bar"), + version: Some("1.1.8".parse().unwrap()), + url: Some(Url::parse("file:///path/to/my/project/foo").unwrap()), + kind: Some(SourceKind::Path), + }, + "path+file:///path/to/my/project/foo#foo::bar@1.1.8", + ); + ok( + "path+file:///path/to/my/project/foo#bar@1.1.8", + PackageIdSpec { + name: String::from("bar"), + version: Some("1.1.8".parse().unwrap()), + url: Some(Url::parse("file:///path/to/my/project/foo").unwrap()), + kind: Some(SourceKind::Path), + }, + "path+file:///path/to/my/project/foo#bar@1.1.8", + ); + ok( + "path+file:///path/to/my/project/foo#foo::bar@1.1.8", + PackageIdSpec { + name: String::from("foo::bar"), + version: Some("1.1.8".parse().unwrap()), + url: Some(Url::parse("file:///path/to/my/project/foo").unwrap()), + kind: Some(SourceKind::Path), + }, + "path+file:///path/to/my/project/foo#foo::bar@1.1.8", + ); } #[test] fn bad_parsing() { - macro_rules! 
err { - ($spec:expr, $expected:pat) => { - let err = PackageIdSpec::parse($spec).unwrap_err(); - let kind = err.0; - assert!( - matches!(kind, $expected), - "`{}` parse error mismatch, got {kind:?}", - $spec - ); - }; - } - err!("baz:", ErrorKind::PartialVersion(_)); err!("baz:*", ErrorKind::PartialVersion(_)); err!("baz@", ErrorKind::PartialVersion(_)); err!("baz@*", ErrorKind::PartialVersion(_)); err!("baz@^1.0", ErrorKind::PartialVersion(_)); - err!("https://baz:1.0", ErrorKind::PartialVersion(_)); - err!("https://#baz:1.0", ErrorKind::PartialVersion(_)); + err!("https://baz:1.0", ErrorKind::NameValidation(_)); + err!("https://#baz:1.0", ErrorKind::NameValidation(_)); err!( "foobar+https://github.com/rust-lang/crates.io-index", ErrorKind::UnsupportedProtocol(_) diff --git a/crates/cargo-util-schemas/src/core/partial_version.rs b/crates/cargo-util-schemas/src/core/partial_version.rs index 5057d6046e1..00252f56aa0 100644 --- a/crates/cargo-util-schemas/src/core/partial_version.rs +++ b/crates/cargo-util-schemas/src/core/partial_version.rs @@ -186,6 +186,7 @@ enum ErrorKind { #[cfg(test)] mod test { use super::*; + use snapbox::prelude::*; use snapbox::str; #[test] @@ -202,7 +203,7 @@ mod test { Ok(result) => result.to_string(), Err(err) => format!("didn't pass: {err}"), }; - snapbox::assert_eq(expected.clone(), actual); + snapbox::assert_data_eq!(actual, expected.clone().raw()); } } @@ -241,7 +242,7 @@ mod test { Ok(result) => format!("didn't fail: {result:?}"), Err(err) => err.to_string(), }; - snapbox::assert_eq(expected.clone(), actual); + snapbox::assert_data_eq!(actual, expected.clone().raw()); } } } diff --git a/crates/cargo-util-schemas/src/lib.rs b/crates/cargo-util-schemas/src/lib.rs index 84b6c39a89b..910438155e9 100644 --- a/crates/cargo-util-schemas/src/lib.rs +++ b/crates/cargo-util-schemas/src/lib.rs @@ -4,6 +4,9 @@ //! parsing command-lines. //! Any logic for getting final semantics from these will likely need other tools to process, like //! 
`cargo metadata`. +//! +//! > This crate is maintained by the Cargo team for use by the wider +//! > ecosystem. This crate follows semver compatibility for its APIs. pub mod core; pub mod manifest; diff --git a/crates/cargo-util-schemas/src/manifest/mod.rs b/crates/cargo-util-schemas/src/manifest/mod.rs index 94013794b8b..0fa5be5c12d 100644 --- a/crates/cargo-util-schemas/src/manifest/mod.rs +++ b/crates/cargo-util-schemas/src/manifest/mod.rs @@ -51,7 +51,7 @@ pub struct TomlManifest { pub replace: Option>, pub patch: Option>>, pub workspace: Option, - pub badges: Option, + pub badges: Option>>, pub lints: Option, /// Report unused keys (see also nested `_unused_keys`) @@ -106,18 +106,12 @@ impl TomlManifest { self.features.as_ref() } - pub fn resolved_badges( - &self, - ) -> Result>>, UnresolvedError> { - self.badges.as_ref().map(|l| l.resolved()).transpose() - } - - pub fn resolved_lints(&self) -> Result, UnresolvedError> { - self.lints.as_ref().map(|l| l.resolved()).transpose() + pub fn normalized_lints(&self) -> Result, UnresolvedError> { + self.lints.as_ref().map(|l| l.normalized()).transpose() } } -#[derive(Debug, Deserialize, Serialize, Clone)] +#[derive(Debug, Default, Deserialize, Serialize, Clone)] #[serde(rename_all = "kebab-case")] pub struct TomlWorkspace { pub members: Option>, @@ -179,6 +173,7 @@ pub struct TomlPackage { pub publish: Option, pub workspace: Option, pub im_a_teapot: Option, + pub autolib: Option, pub autobins: Option, pub autoexamples: Option, pub autotests: Option, @@ -205,79 +200,133 @@ pub struct TomlPackage { } impl TomlPackage { - pub fn resolved_edition(&self) -> Result, UnresolvedError> { - self.edition.as_ref().map(|v| v.resolved()).transpose() + pub fn new(name: PackageName) -> Self { + Self { + name, + + edition: None, + rust_version: None, + version: None, + authors: None, + build: None, + metabuild: None, + default_target: None, + forced_target: None, + links: None, + exclude: None, + include: None, + publish: None, + 
workspace: None, + im_a_teapot: None, + autolib: None, + autobins: None, + autoexamples: None, + autotests: None, + autobenches: None, + default_run: None, + description: None, + homepage: None, + documentation: None, + readme: None, + keywords: None, + categories: None, + license: None, + license_file: None, + repository: None, + resolver: None, + metadata: None, + _invalid_cargo_features: None, + } } - pub fn resolved_rust_version(&self) -> Result, UnresolvedError> { - self.rust_version.as_ref().map(|v| v.resolved()).transpose() + pub fn normalized_edition(&self) -> Result, UnresolvedError> { + self.edition.as_ref().map(|v| v.normalized()).transpose() } - pub fn resolved_version(&self) -> Result, UnresolvedError> { - self.version.as_ref().map(|v| v.resolved()).transpose() + pub fn normalized_rust_version(&self) -> Result, UnresolvedError> { + self.rust_version + .as_ref() + .map(|v| v.normalized()) + .transpose() } - pub fn resolved_authors(&self) -> Result>, UnresolvedError> { - self.authors.as_ref().map(|v| v.resolved()).transpose() + pub fn normalized_version(&self) -> Result, UnresolvedError> { + self.version.as_ref().map(|v| v.normalized()).transpose() } - pub fn resolved_exclude(&self) -> Result>, UnresolvedError> { - self.exclude.as_ref().map(|v| v.resolved()).transpose() + pub fn normalized_authors(&self) -> Result>, UnresolvedError> { + self.authors.as_ref().map(|v| v.normalized()).transpose() } - pub fn resolved_include(&self) -> Result>, UnresolvedError> { - self.include.as_ref().map(|v| v.resolved()).transpose() + pub fn normalized_build(&self) -> Result, UnresolvedError> { + let readme = self.build.as_ref().ok_or(UnresolvedError)?; + match readme { + StringOrBool::Bool(false) => Ok(None), + StringOrBool::Bool(true) => Err(UnresolvedError), + StringOrBool::String(value) => Ok(Some(value)), + } } - pub fn resolved_publish(&self) -> Result, UnresolvedError> { - self.publish.as_ref().map(|v| v.resolved()).transpose() + pub fn normalized_exclude(&self) -> 
Result>, UnresolvedError> { + self.exclude.as_ref().map(|v| v.normalized()).transpose() } - pub fn resolved_description(&self) -> Result, UnresolvedError> { - self.description.as_ref().map(|v| v.resolved()).transpose() + pub fn normalized_include(&self) -> Result>, UnresolvedError> { + self.include.as_ref().map(|v| v.normalized()).transpose() } - pub fn resolved_homepage(&self) -> Result, UnresolvedError> { - self.homepage.as_ref().map(|v| v.resolved()).transpose() + pub fn normalized_publish(&self) -> Result, UnresolvedError> { + self.publish.as_ref().map(|v| v.normalized()).transpose() } - pub fn resolved_documentation(&self) -> Result, UnresolvedError> { - self.documentation + pub fn normalized_description(&self) -> Result, UnresolvedError> { + self.description .as_ref() - .map(|v| v.resolved()) + .map(|v| v.normalized()) .transpose() } - pub fn resolved_readme(&self) -> Result, UnresolvedError> { - self.readme + pub fn normalized_homepage(&self) -> Result, UnresolvedError> { + self.homepage.as_ref().map(|v| v.normalized()).transpose() + } + + pub fn normalized_documentation(&self) -> Result, UnresolvedError> { + self.documentation .as_ref() - .map(|v| { - v.resolved().and_then(|sb| match sb { - StringOrBool::Bool(_) => Err(UnresolvedError), - StringOrBool::String(value) => Ok(value), - }) - }) + .map(|v| v.normalized()) .transpose() } - pub fn resolved_keywords(&self) -> Result>, UnresolvedError> { - self.keywords.as_ref().map(|v| v.resolved()).transpose() + pub fn normalized_readme(&self) -> Result, UnresolvedError> { + let readme = self.readme.as_ref().ok_or(UnresolvedError)?; + readme.normalized().and_then(|sb| match sb { + StringOrBool::Bool(false) => Ok(None), + StringOrBool::Bool(true) => Err(UnresolvedError), + StringOrBool::String(value) => Ok(Some(value)), + }) } - pub fn resolved_categories(&self) -> Result>, UnresolvedError> { - self.categories.as_ref().map(|v| v.resolved()).transpose() + pub fn normalized_keywords(&self) -> Result>, UnresolvedError> 
{ + self.keywords.as_ref().map(|v| v.normalized()).transpose() } - pub fn resolved_license(&self) -> Result, UnresolvedError> { - self.license.as_ref().map(|v| v.resolved()).transpose() + pub fn normalized_categories(&self) -> Result>, UnresolvedError> { + self.categories.as_ref().map(|v| v.normalized()).transpose() } - pub fn resolved_license_file(&self) -> Result, UnresolvedError> { - self.license_file.as_ref().map(|v| v.resolved()).transpose() + pub fn normalized_license(&self) -> Result, UnresolvedError> { + self.license.as_ref().map(|v| v.normalized()).transpose() } - pub fn resolved_repository(&self) -> Result, UnresolvedError> { - self.repository.as_ref().map(|v| v.resolved()).transpose() + pub fn normalized_license_file(&self) -> Result, UnresolvedError> { + self.license_file + .as_ref() + .map(|v| v.normalized()) + .transpose() + } + + pub fn normalized_repository(&self) -> Result, UnresolvedError> { + self.repository.as_ref().map(|v| v.normalized()).transpose() } } @@ -285,14 +334,14 @@ impl TomlPackage { #[derive(Serialize, Copy, Clone, Debug)] #[serde(untagged)] pub enum InheritableField { - /// The type that that is used when not inheriting from a workspace. + /// The type that is used when not inheriting from a workspace. Value(T), /// The type when inheriting from a workspace. Inherit(TomlInheritedField), } impl InheritableField { - pub fn resolved(&self) -> Result<&T, UnresolvedError> { + pub fn normalized(&self) -> Result<&T, UnresolvedError> { self.as_value().ok_or(UnresolvedError) } @@ -582,7 +631,7 @@ impl From for bool { #[derive(Serialize, Clone, Debug)] #[serde(untagged)] pub enum InheritableDependency { - /// The type that that is used when not inheriting from a workspace. + /// The type that is used when not inheriting from a workspace. Value(TomlDependency), /// The type when inheriting from a workspace. 
Inherit(TomlInheritedDependency), @@ -596,7 +645,7 @@ impl InheritableDependency { } } - pub fn resolved(&self) -> Result<&TomlDependency, UnresolvedError> { + pub fn normalized(&self) -> Result<&TomlDependency, UnresolvedError> { match self { InheritableDependency::Value(d) => Ok(d), InheritableDependency::Inherit(_) => Err(UnresolvedError), @@ -683,6 +732,13 @@ impl TomlDependency { } } + pub fn default_features(&self) -> Option { + match self { + TomlDependency::Detailed(d) => d.default_features(), + TomlDependency::Simple(..) => None, + } + } + pub fn unused_keys(&self) -> Vec { match self { TomlDependency::Simple(_) => vec![], @@ -722,6 +778,7 @@ pub struct TomlDetailedDependency { // `path` is relative to the file it appears in. If that's a `Cargo.toml`, it'll be relative to // that TOML file, and if it's a `.cargo/config` file, it'll be relative to that file. pub path: Option

, + pub base: Option, pub git: Option, pub branch: Option, pub tag: Option, @@ -761,6 +818,7 @@ impl Default for TomlDetailedDependency

{ registry: Default::default(), registry_index: Default::default(), path: Default::default(), + base: Default::default(), git: Default::default(), branch: Default::default(), tag: Default::default(), @@ -1032,7 +1090,7 @@ impl<'de> de::Deserialize<'de> for TomlDebugInfo { D: de::Deserializer<'de>, { use serde::de::Error as _; - let expecting = "a boolean, 0, 1, 2, \"line-tables-only\", or \"line-directives-only\""; + let expecting = "a boolean, 0, 1, 2, \"none\", \"limited\", \"full\", \"line-tables-only\", or \"line-directives-only\""; UntaggedEnumVisitor::new() .expecting(expecting) .bool(|value| { @@ -1216,7 +1274,6 @@ pub struct TomlTarget { pub doctest: Option, pub bench: Option, pub doc: Option, - pub plugin: Option, pub doc_scrape_examples: Option, pub proc_macro: Option, #[serde(rename = "proc_macro")] @@ -1360,6 +1417,16 @@ impl> FeatureName { } } +str_newtype!(PathBaseName); + +impl> PathBaseName { + /// Validated path base name + pub fn new(name: T) -> Result { + restricted_names::validate_path_base_name(name.as_ref())?; + Ok(Self(name)) + } +} + /// Corresponds to a `target` entry, but `TomlTarget` is already used. 
#[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "kebab-case")] @@ -1396,7 +1463,7 @@ pub struct InheritableLints { } impl InheritableLints { - pub fn resolved(&self) -> Result<&TomlLints, UnresolvedError> { + pub fn normalized(&self) -> Result<&TomlLints, UnresolvedError> { if self.workspace { Err(UnresolvedError) } else { @@ -1496,6 +1563,13 @@ impl TomlLint { Self::Config(config) => config.priority, } } + + pub fn config(&self) -> Option<&toml::Table> { + match self { + Self::Level(_) => None, + Self::Config(config) => Some(&config.config), + } + } } #[derive(Serialize, Deserialize, Debug, Clone)] @@ -1504,6 +1578,8 @@ pub struct TomlLintConfig { pub level: TomlLintLevel, #[serde(default)] pub priority: i8, + #[serde(flatten)] + pub config: toml::Table, } #[derive(Serialize, Deserialize, Debug, Copy, Clone, Eq, PartialEq)] diff --git a/crates/cargo-util-schemas/src/manifest/rust_version.rs b/crates/cargo-util-schemas/src/manifest/rust_version.rs index 5c40097737f..7afcf92bd80 100644 --- a/crates/cargo-util-schemas/src/manifest/rust_version.rs +++ b/crates/cargo-util-schemas/src/manifest/rust_version.rs @@ -106,6 +106,7 @@ enum RustVersionErrorKind { #[cfg(test)] mod test { use super::*; + use snapbox::prelude::*; use snapbox::str; #[test] @@ -212,7 +213,7 @@ mod test { Ok(result) => format!("didn't fail: {result:?}"), Err(err) => err.to_string(), }; - snapbox::assert_eq(expected.clone(), actual); + snapbox::assert_data_eq!(actual, expected.clone().raw()); } } } diff --git a/crates/cargo-util-schemas/src/restricted_names.rs b/crates/cargo-util-schemas/src/restricted_names.rs index 40f22197a5b..18be53bda66 100644 --- a/crates/cargo-util-schemas/src/restricted_names.rs +++ b/crates/cargo-util-schemas/src/restricted_names.rs @@ -238,6 +238,10 @@ pub(crate) fn validate_feature_name(name: &str) -> Result<()> { Ok(()) } +pub(crate) fn validate_path_base_name(name: &str) -> Result<()> { + validate_name(name, "path base name") +} + #[cfg(test)] mod 
tests { use super::*; diff --git a/crates/cargo-util/Cargo.toml b/crates/cargo-util/Cargo.toml index 28877eedaf7..554689c1a05 100644 --- a/crates/cargo-util/Cargo.toml +++ b/crates/cargo-util/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "cargo-util" -version = "0.2.12" -rust-version = "1.77" # MSRV:1 +version = "0.2.16" +rust-version = "1.81" # MSRV:1 edition.workspace = true license.workspace = true homepage.workspace = true diff --git a/crates/cargo-util/README.md b/crates/cargo-util/README.md new file mode 100644 index 00000000000..579d1a85c1e --- /dev/null +++ b/crates/cargo-util/README.md @@ -0,0 +1,3 @@ +> This crate is maintained by the Cargo team, primarily for use by Cargo +> and not intended for external use (except as a transitive dependency). This +> crate may make major changes to its APIs or be deprecated without warning. diff --git a/crates/cargo-util/src/lib.rs b/crates/cargo-util/src/lib.rs index 717e89ba469..0734118f613 100644 --- a/crates/cargo-util/src/lib.rs +++ b/crates/cargo-util/src/lib.rs @@ -1,4 +1,8 @@ //! Miscellaneous support code used by Cargo. +//! +//! > This crate is maintained by the Cargo team, primarily for use by Cargo +//! > and not intended for external use (except as a transitive dependency). This +//! > crate may make major changes to its APIs or be deprecated without warning. #![allow(clippy::disallowed_methods)] diff --git a/crates/cargo-util/src/paths.rs b/crates/cargo-util/src/paths.rs index 1172b002885..59e812f3aea 100644 --- a/crates/cargo-util/src/paths.rs +++ b/crates/cargo-util/src/paths.rs @@ -185,10 +185,34 @@ pub fn write, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> /// write_atomic uses tempfile::persist to accomplish atomic writes. pub fn write_atomic, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> { let path = path.as_ref(); + + // On unix platforms, get the permissions of the original file. Copy only the user/group/other + // read/write/execute permission bits. 
The tempfile lib defaults to an initial mode of 0o600, + // and we'll set the proper permissions after creating the file. + #[cfg(unix)] + let perms = path.metadata().ok().map(|meta| { + use std::os::unix::fs::PermissionsExt; + + // these constants are u16 on macOS + let mask = u32::from(libc::S_IRWXU | libc::S_IRWXG | libc::S_IRWXO); + let mode = meta.permissions().mode() & mask; + + std::fs::Permissions::from_mode(mode) + }); + let mut tmp = TempFileBuilder::new() .prefix(path.file_name().unwrap()) .tempfile_in(path.parent().unwrap())?; tmp.write_all(contents.as_ref())?; + + // On unix platforms, set the permissions on the newly created file. We can use fchmod (called + // by the std lib; subject to change) which ignores the umask so that the new file has the same + // permissions as the old file. + #[cfg(unix)] + if let Some(perms) = perms { + tmp.as_file().set_permissions(perms)?; + } + tmp.persist(path)?; Ok(()) } @@ -493,24 +517,57 @@ fn _remove_dir(p: &Path) -> Result<()> { /// /// If the file is readonly, this will attempt to change the permissions to /// force the file to be deleted. +/// On Windows, if the file is a symlink to a directory, this will attempt to remove +/// the symlink itself. pub fn remove_file>(p: P) -> Result<()> { _remove_file(p.as_ref()) } fn _remove_file(p: &Path) -> Result<()> { - let mut err = match fs::remove_file(p) { - Ok(()) => return Ok(()), - Err(e) => e, - }; - - if err.kind() == io::ErrorKind::PermissionDenied && set_not_readonly(p).unwrap_or(false) { - match fs::remove_file(p) { - Ok(()) => return Ok(()), - Err(e) => err = e, + // For Windows, we need to check if the file is a symlink to a directory + // and remove the symlink itself by calling `remove_dir` instead of + // `remove_file`. 
+ #[cfg(target_os = "windows")] + { + use std::os::windows::fs::FileTypeExt; + let metadata = symlink_metadata(p)?; + let file_type = metadata.file_type(); + if file_type.is_symlink_dir() { + return remove_symlink_dir_with_permission_check(p); } } - Err(err).with_context(|| format!("failed to remove file `{}`", p.display())) + remove_file_with_permission_check(p) +} + +#[cfg(target_os = "windows")] +fn remove_symlink_dir_with_permission_check(p: &Path) -> Result<()> { + remove_with_permission_check(fs::remove_dir, p) + .with_context(|| format!("failed to remove symlink dir `{}`", p.display())) +} + +fn remove_file_with_permission_check(p: &Path) -> Result<()> { + remove_with_permission_check(fs::remove_file, p) + .with_context(|| format!("failed to remove file `{}`", p.display())) +} + +fn remove_with_permission_check(remove_func: F, p: P) -> io::Result<()> +where + F: Fn(P) -> io::Result<()>, + P: AsRef + Clone, +{ + match remove_func(p.clone()) { + Ok(()) => Ok(()), + Err(e) => { + if e.kind() == io::ErrorKind::PermissionDenied + && set_not_readonly(p.as_ref()).unwrap_or(false) + { + remove_func(p) + } else { + Err(e) + } + } + } } fn set_not_readonly(p: &Path) -> io::Result { @@ -565,27 +622,35 @@ fn _link_or_copy(src: &Path, dst: &Path) -> Result<()> { src }; symlink(src, dst) - } else if env::var_os("__CARGO_COPY_DONT_LINK_DO_NOT_USE_THIS").is_some() { - // This is a work-around for a bug in macOS 10.15. When running on - // APFS, there seems to be a strange race condition with - // Gatekeeper where it will forcefully kill a process launched via - // `cargo run` with SIGKILL. Copying seems to avoid the problem. - // This shouldn't affect anyone except Cargo's test suite because - // it is very rare, and only seems to happen under heavy load and - // rapidly creating lots of executables and running them. - // See https://github.com/rust-lang/cargo/issues/7821 for the - // gory details. 
- fs::copy(src, dst).map(|_| ()) } else { if cfg!(target_os = "macos") { - // This is a work-around for a bug on macos. There seems to be a race condition - // with APFS when hard-linking binaries. Gatekeeper does not have signing or - // hash information stored in kernel when running the process. Therefore killing it. - // This problem does not appear when copying files as kernel has time to process it. - // Note that: fs::copy on macos is using CopyOnWrite (syscall fclonefileat) which should be - // as fast as hardlinking. - // See https://github.com/rust-lang/cargo/issues/10060 for the details - fs::copy(src, dst).map(|_| ()) + // There seems to be a race condition with APFS when hard-linking + // binaries. Gatekeeper does not have signing or hash information + // stored in kernel when running the process. Therefore killing it. + // This problem does not appear when copying files as kernel has + // time to process it. Note that: fs::copy on macos is using + // CopyOnWrite (syscall fclonefileat) which should be as fast as + // hardlinking. See these issues for the details: + // + // * https://github.com/rust-lang/cargo/issues/7821 + // * https://github.com/rust-lang/cargo/issues/10060 + fs::copy(src, dst).map_or_else( + |e| { + if e.raw_os_error() + .map_or(false, |os_err| os_err == 35 /* libc::EAGAIN */) + { + tracing::info!("copy failed {e:?}. 
falling back to fs::hard_link"); + + // Working around an issue copying too fast with zfs (probably related to + // https://github.com/openzfsonosx/zfs/issues/809) + // See https://github.com/rust-lang/cargo/issues/13838 + fs::hard_link(src, dst) + } else { + Err(e) + } + }, + |_| Ok(()), + ) } else { fs::hard_link(src, dst) } @@ -815,6 +880,32 @@ mod tests { assert_eq!(contents, original_contents); } + #[test] + #[cfg(unix)] + fn write_atomic_permissions() { + use std::os::unix::fs::PermissionsExt; + + let original_perms = std::fs::Permissions::from_mode(u32::from( + libc::S_IRWXU | libc::S_IRGRP | libc::S_IWGRP | libc::S_IROTH, + )); + + let tmp = tempfile::Builder::new().tempfile().unwrap(); + + // need to set the permissions after creating the file to avoid umask + tmp.as_file() + .set_permissions(original_perms.clone()) + .unwrap(); + + // after this call, the file at `tmp.path()` will not be the same as the file held by `tmp` + write_atomic(tmp.path(), "new").unwrap(); + assert_eq!(std::fs::read_to_string(tmp.path()).unwrap(), "new"); + + let new_perms = std::fs::metadata(tmp.path()).unwrap().permissions(); + + let mask = u32::from(libc::S_IRWXU | libc::S_IRWXG | libc::S_IRWXO); + assert_eq!(original_perms.mode(), new_perms.mode() & mask); + } + #[test] fn join_paths_lists_paths_on_error() { let valid_paths = vec!["/testing/one", "/testing/two"]; @@ -850,4 +941,50 @@ mod tests { ); } } + + #[test] + #[cfg(windows)] + fn test_remove_symlink_dir() { + use super::*; + use std::fs; + use std::os::windows::fs::symlink_dir; + + let tmpdir = tempfile::tempdir().unwrap(); + let dir_path = tmpdir.path().join("testdir"); + let symlink_path = tmpdir.path().join("symlink"); + + fs::create_dir(&dir_path).unwrap(); + + symlink_dir(&dir_path, &symlink_path).expect("failed to create symlink"); + + assert!(symlink_path.exists()); + + assert!(remove_file(symlink_path.clone()).is_ok()); + + assert!(!symlink_path.exists()); + assert!(dir_path.exists()); + } + + #[test] + 
#[cfg(windows)] + fn test_remove_symlink_file() { + use super::*; + use std::fs; + use std::os::windows::fs::symlink_file; + + let tmpdir = tempfile::tempdir().unwrap(); + let file_path = tmpdir.path().join("testfile"); + let symlink_path = tmpdir.path().join("symlink"); + + fs::write(&file_path, b"test").unwrap(); + + symlink_file(&file_path, &symlink_path).expect("failed to create symlink"); + + assert!(symlink_path.exists()); + + assert!(remove_file(symlink_path.clone()).is_ok()); + + assert!(!symlink_path.exists()); + assert!(file_path.exists()); + } } diff --git a/crates/crates-io/Cargo.toml b/crates/crates-io/Cargo.toml index 7407e1b3bbc..bca110f0ec8 100644 --- a/crates/crates-io/Cargo.toml +++ b/crates/crates-io/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "crates-io" -version = "0.40.2" -rust-version = "1.77" # MSRV:1 +version = "0.40.6" +rust-version = "1.81" # MSRV:1 edition.workspace = true license.workspace = true homepage.workspace = true diff --git a/crates/crates-io/README.md b/crates/crates-io/README.md new file mode 100644 index 00000000000..844bc081f70 --- /dev/null +++ b/crates/crates-io/README.md @@ -0,0 +1,2 @@ +> This crate is maintained by the Cargo team for use by the wider +> ecosystem. This crate follows semver compatibility for its APIs. diff --git a/crates/crates-io/lib.rs b/crates/crates-io/lib.rs index 5d8a8c50cc3..ae7f9c4daa3 100644 --- a/crates/crates-io/lib.rs +++ b/crates/crates-io/lib.rs @@ -1,3 +1,6 @@ +//! > This crate is maintained by the Cargo team for use by the wider +//! > ecosystem. This crate follows semver compatibility for its APIs. + use std::collections::BTreeMap; use std::fs::File; use std::io::prelude::*; diff --git a/crates/home/README.md b/crates/home/README.md index a80adbd3b35..23523f9bdb1 100644 --- a/crates/home/README.md +++ b/crates/home/README.md @@ -20,6 +20,10 @@ that Cargo and rustup store their data. See [rust-lang/rust#43321]. 
+> This crate is maintained by the Cargo team, primarily for use by Cargo and Rustup +> and not intended for external use. This +> crate may make major changes to its APIs or be deprecated without warning. + [rust-lang/rust#43321]: https://github.com/rust-lang/rust/issues/43321 ## License diff --git a/crates/home/src/lib.rs b/crates/home/src/lib.rs index bbe7c32ca84..534ecb2f903 100644 --- a/crates/home/src/lib.rs +++ b/crates/home/src/lib.rs @@ -16,6 +16,10 @@ //! //! See also this [discussion]. //! +//! > This crate is maintained by the Cargo team, primarily for use by Cargo and Rustup +//! > and not intended for external use. This +//! > crate may make major changes to its APIs or be deprecated without warning. +//! //! [discussion]: https://github.com/rust-lang/rust/pull/46799#issuecomment-361156935 #![allow(clippy::disallowed_methods)] diff --git a/crates/home/src/windows.rs b/crates/home/src/windows.rs index c9a63d97b99..bd21ad085c7 100644 --- a/crates/home/src/windows.rs +++ b/crates/home/src/windows.rs @@ -20,7 +20,12 @@ pub fn home_dir_inner() -> Option { fn home_dir_crt() -> Option { unsafe { let mut path = ptr::null_mut(); - match SHGetKnownFolderPath(&FOLDERID_Profile, KF_FLAG_DONT_VERIFY as u32, 0, &mut path) { + match SHGetKnownFolderPath( + &FOLDERID_Profile, + KF_FLAG_DONT_VERIFY as u32, + std::ptr::null_mut(), + &mut path, + ) { S_OK => { let path_slice = slice::from_raw_parts(path, wcslen(path)); let s = OsString::from_wide(&path_slice); diff --git a/crates/mdman/Cargo.toml b/crates/mdman/Cargo.toml index 375d8ac750f..6668d13a9b0 100644 --- a/crates/mdman/Cargo.toml +++ b/crates/mdman/Cargo.toml @@ -3,7 +3,7 @@ name = "mdman" version = "0.0.0" edition.workspace = true license.workspace = true -description = "Creates a man page page from markdown." +description = "Creates a man page from markdown." 
publish = false [dependencies] diff --git a/crates/mdman/README.md b/crates/mdman/README.md index e28b596ba34..9dc1553ccb0 100644 --- a/crates/mdman/README.md +++ b/crates/mdman/README.md @@ -2,6 +2,10 @@ mdman is a small utility for creating man pages from markdown text files. +> This crate is maintained by the Cargo team, primarily for use by Cargo +> and not intended for external use (except as a transitive dependency). This +> crate may make major changes to its APIs or be deprecated without warning. + ## Usage See the [man page](doc/out/mdman.md) generated by this tool. diff --git a/crates/mdman/src/format/man.rs b/crates/mdman/src/format/man.rs index edb5c05e34b..72d45d35277 100644 --- a/crates/mdman/src/format/man.rs +++ b/crates/mdman/src/format/man.rs @@ -140,7 +140,7 @@ impl<'e> ManRenderer<'e> { suppress_paragraph = true; } } - Tag::BlockQuote => { + Tag::BlockQuote(_kind) => { self.flush(); // .RS = move left margin over 3 // .ll = shrink line length @@ -356,6 +356,8 @@ impl<'e> ManRenderer<'e> { } Event::TaskListMarker(_b) => unimplemented!(), Event::InlineHtml(..) => unimplemented!(), + Event::InlineMath(..) => unimplemented!(), + Event::DisplayMath(..) => unimplemented!(), } } Ok(()) diff --git a/crates/mdman/src/format/text.rs b/crates/mdman/src/format/text.rs index 5a858fcc792..30e01d6faa9 100644 --- a/crates/mdman/src/format/text.rs +++ b/crates/mdman/src/format/text.rs @@ -137,7 +137,7 @@ impl<'e> TextRenderer<'e> { self.indent = (level as usize - 1) * 3 + 1; } } - Tag::BlockQuote => { + Tag::BlockQuote(_kind) => { self.indent += 3; } Tag::CodeBlock(_kind) => { @@ -347,6 +347,8 @@ impl<'e> TextRenderer<'e> { } Event::TaskListMarker(_b) => unimplemented!(), Event::InlineHtml(..) => unimplemented!(), + Event::InlineMath(..) => unimplemented!(), + Event::DisplayMath(..) 
=> unimplemented!(), } } Ok(()) diff --git a/crates/mdman/src/hbs.rs b/crates/mdman/src/hbs.rs index 09726b83ce7..ca739900cf0 100644 --- a/crates/mdman/src/hbs.rs +++ b/crates/mdman/src/hbs.rs @@ -139,6 +139,9 @@ impl HelperDef for OptionHelper<'_> { // Render the block. let block = t.renders(r, gctx, rc)?; + // Windows newlines can break some rendering, so normalize. + let block = block.replace("\r\n", "\n"); + // Get the name of this page. let man_name = gctx .data() diff --git a/crates/mdman/src/lib.rs b/crates/mdman/src/lib.rs index f0b5167e80c..0b0e6e734af 100644 --- a/crates/mdman/src/lib.rs +++ b/crates/mdman/src/lib.rs @@ -1,4 +1,8 @@ //! mdman markdown to man converter. +//! +//! > This crate is maintained by the Cargo team, primarily for use by Cargo +//! > and not intended for external use (except as a transitive dependency). This +//! > crate may make major changes to its APIs or be deprecated without warning. use anyhow::{bail, Context, Error}; use pulldown_cmark::{CowStr, Event, LinkType, Options, Parser, Tag, TagEnd}; diff --git a/crates/mdman/tests/compare.rs b/crates/mdman/tests/compare.rs index 661afcd9517..b20366ddc46 100644 --- a/crates/mdman/tests/compare.rs +++ b/crates/mdman/tests/compare.rs @@ -22,7 +22,7 @@ fn run(name: &str) { name, format.extension(section) )); - snapbox::assert_eq(snapbox::Data::read_from(&expected_path, None), result); + snapbox::assert_data_eq!(result, snapbox::Data::read_from(&expected_path, None).raw()); } } diff --git a/crates/mdman/tests/compare/expected/options.1 b/crates/mdman/tests/compare/expected/options.1 index d362421e9f0..02ba1eb66bc 100644 --- a/crates/mdman/tests/compare/expected/options.1 +++ b/crates/mdman/tests/compare/expected/options.1 @@ -69,6 +69,11 @@ Flag with optional value. .RS 4 Alternate syntax for optional value (with required = for disambiguation). 
.RE +.sp +\fB\-\-split\-block\fR +.RS 4 +An option where the description has a \fBblock statement that is split across multiple lines\fR +.RE .SH "EXAMPLES" .sp .RS 4 diff --git a/crates/mdman/tests/compare/expected/options.md b/crates/mdman/tests/compare/expected/options.md index 19b0b443b64..0178cac2d7c 100644 --- a/crates/mdman/tests/compare/expected/options.md +++ b/crates/mdman/tests/compare/expected/options.md @@ -58,6 +58,10 @@ A description of the command.

Alternate syntax for optional value (with required = for disambiguation).
+
--split-block
+
An option where the description has a block statement that is split across multiple lines
+ + diff --git a/crates/mdman/tests/compare/expected/options.txt b/crates/mdman/tests/compare/expected/options.txt index 9bfdec67c95..f585cba466a 100644 --- a/crates/mdman/tests/compare/expected/options.txt +++ b/crates/mdman/tests/compare/expected/options.txt @@ -43,6 +43,10 @@ OPTIONS Alternate syntax for optional value (with required = for disambiguation). + --split-block + An option where the description has a block statement that is split + across multiple lines + EXAMPLES 1. An example diff --git a/crates/mdman/tests/compare/includes/options-common.md b/crates/mdman/tests/compare/includes/options-common.md index 07404e3f74c..56f727a10cb 100644 --- a/crates/mdman/tests/compare/includes/options-common.md +++ b/crates/mdman/tests/compare/includes/options-common.md @@ -11,4 +11,9 @@ Flag with optional value. Alternate syntax for optional value (with required = for disambiguation). {{/option}} +{{#option "`--split-block`"}} +An option where the description has a `block statement +that is split across multiple lines` +{{/option}} + {{/options}} diff --git a/crates/mdman/tests/invalid.rs b/crates/mdman/tests/invalid.rs index b8be1ed24c3..46f83df5152 100644 --- a/crates/mdman/tests/invalid.rs +++ b/crates/mdman/tests/invalid.rs @@ -3,15 +3,16 @@ use std::path::PathBuf; use mdman::{Format, ManMap}; +use snapbox::prelude::*; -fn run(name: &str, expected_error: &str) { +fn run(name: &str, expected_error: impl IntoData) { let input = PathBuf::from(format!("tests/invalid/{}", name)); match mdman::convert(&input, Format::Man, None, ManMap::new()) { Ok(_) => { panic!("expected {} to fail", name); } Err(e) => { - snapbox::assert_eq(expected_error, e.to_string()); + snapbox::assert_data_eq!(e.to_string(), expected_error.raw()); } } } diff --git a/crates/resolver-tests/Cargo.toml b/crates/resolver-tests/Cargo.toml index 44f90690051..b63e3ffcee5 100644 --- a/crates/resolver-tests/Cargo.toml +++ b/crates/resolver-tests/Cargo.toml @@ -6,6 +6,7 @@ publish = false [dependencies] 
cargo.workspace = true +cargo-platform.workspace = true cargo-util-schemas.workspace = true cargo-util.workspace = true proptest.workspace = true diff --git a/crates/resolver-tests/README.md b/crates/resolver-tests/README.md new file mode 100644 index 00000000000..72a4f7e3c5b --- /dev/null +++ b/crates/resolver-tests/README.md @@ -0,0 +1,17 @@ +# resolver-tests + +## The aim + +This crate aims to test the resolution of Cargo's resolver. It implements a [SAT solver](https://en.wikipedia.org/wiki/SAT_solver) to compare with resolution of Cargo's resolver. +This ensures that Cargo's dependency resolution is proven valid by lowering to [SAT problem](https://en.wikipedia.org/wiki/Boolean_satisfiability_problem). + +> This crate is maintained by the Cargo team, primarily for use by Cargo +> and not intended for external use (except as a transitive dependency). This +> crate may make major changes to its APIs or be deprecated without warning. + +## About the test + +The Cargo's resolver is very sensitive to what order it tries to evaluate constraints. This makes it incredibly difficult +to be sure that a handful of tests actually covers all the important permutations of decision-making. The tests not only needs +to hit all the corner cases, it needs to try all of the orders of evaluation. So we use fuzz testing to cover more permutations. 
+ diff --git a/crates/resolver-tests/src/helpers.rs b/crates/resolver-tests/src/helpers.rs new file mode 100644 index 00000000000..2706c4d4732 --- /dev/null +++ b/crates/resolver-tests/src/helpers.rs @@ -0,0 +1,260 @@ +use std::collections::BTreeMap; +use std::fmt::Debug; +use std::sync::OnceLock; + +use cargo::core::dependency::DepKind; +use cargo::core::{Dependency, GitReference, PackageId, SourceId, Summary}; +use cargo::util::IntoUrl; + +pub trait ToDep { + fn to_dep(self) -> Dependency; + fn opt(self) -> Dependency; + fn with(self, features: &[&'static str]) -> Dependency; + fn with_default(self) -> Dependency; + fn rename(self, name: &str) -> Dependency; +} + +impl ToDep for &'static str { + fn to_dep(self) -> Dependency { + Dependency::parse(self, Some("1.0.0"), registry_loc()).unwrap() + } + fn opt(self) -> Dependency { + let mut dep = self.to_dep(); + dep.set_optional(true); + dep + } + fn with(self, features: &[&'static str]) -> Dependency { + let mut dep = self.to_dep(); + dep.set_default_features(false); + dep.set_features(features.into_iter().copied()); + dep + } + fn with_default(self) -> Dependency { + let mut dep = self.to_dep(); + dep.set_default_features(true); + dep + } + fn rename(self, name: &str) -> Dependency { + let mut dep = self.to_dep(); + dep.set_explicit_name_in_toml(name); + dep + } +} + +impl ToDep for Dependency { + fn to_dep(self) -> Dependency { + self + } + fn opt(mut self) -> Dependency { + self.set_optional(true); + self + } + fn with(mut self, features: &[&'static str]) -> Dependency { + self.set_default_features(false); + self.set_features(features.into_iter().copied()); + self + } + fn with_default(mut self) -> Dependency { + self.set_default_features(true); + self + } + fn rename(mut self, name: &str) -> Dependency { + self.set_explicit_name_in_toml(name); + self + } +} + +pub trait ToPkgId { + fn to_pkgid(&self) -> PackageId; +} + +impl ToPkgId for PackageId { + fn to_pkgid(&self) -> PackageId { + *self + } +} + +impl<'a> 
ToPkgId for &'a str { + fn to_pkgid(&self) -> PackageId { + PackageId::try_new(*self, "1.0.0", registry_loc()).unwrap() + } +} + +impl, U: AsRef> ToPkgId for (T, U) { + fn to_pkgid(&self) -> PackageId { + let (name, vers) = self; + PackageId::try_new(name.as_ref(), vers.as_ref(), registry_loc()).unwrap() + } +} + +#[macro_export] +macro_rules! pkg { + ($pkgid:expr => [$($deps:expr),* $(,)? ]) => ({ + use $crate::helpers::ToDep; + let d: Vec = vec![$($deps.to_dep()),*]; + $crate::helpers::pkg_dep($pkgid, d) + }); + + ($pkgid:expr) => ({ + $crate::helpers::pkg($pkgid) + }) +} + +fn registry_loc() -> SourceId { + static EXAMPLE_DOT_COM: OnceLock = OnceLock::new(); + let example_dot = EXAMPLE_DOT_COM.get_or_init(|| { + SourceId::for_registry(&"https://example.com".into_url().unwrap()).unwrap() + }); + *example_dot +} + +pub fn pkg(name: T) -> Summary { + pkg_dep(name, Vec::new()) +} + +pub fn pkg_dep(name: T, dep: Vec) -> Summary { + let pkgid = name.to_pkgid(); + let link = if pkgid.name().ends_with("-sys") { + Some(pkgid.name()) + } else { + None + }; + Summary::new(name.to_pkgid(), dep, &BTreeMap::new(), link, None).unwrap() +} + +pub fn pkg_dep_with( + name: T, + dep: Vec, + features: &[(&'static str, &[&'static str])], +) -> Summary { + let pkgid = name.to_pkgid(); + let link = if pkgid.name().ends_with("-sys") { + Some(pkgid.name()) + } else { + None + }; + let features = features + .into_iter() + .map(|&(name, values)| (name.into(), values.into_iter().map(|&v| v.into()).collect())) + .collect(); + Summary::new(name.to_pkgid(), dep, &features, link, None).unwrap() +} + +pub fn pkg_dep_link(name: T, link: &str, dep: Vec) -> Summary { + Summary::new(name.to_pkgid(), dep, &BTreeMap::new(), Some(link), None).unwrap() +} + +pub fn pkg_id(name: &str) -> PackageId { + PackageId::try_new(name, "1.0.0", registry_loc()).unwrap() +} + +pub fn pkg_id_source(name: &str, source: &str) -> PackageId { + PackageId::try_new( + name, + "1.0.0", + 
SourceId::for_registry(&source.into_url().unwrap()).unwrap(), + ) + .unwrap() +} + +fn pkg_id_loc(name: &str, loc: &str) -> PackageId { + let remote = loc.into_url(); + let master = GitReference::Branch("master".to_string()); + let source_id = SourceId::for_git(&remote.unwrap(), master).unwrap(); + + PackageId::try_new(name, "1.0.0", source_id).unwrap() +} + +pub fn pkg_loc(name: &str, loc: &str) -> Summary { + let link = if name.ends_with("-sys") { + Some(name) + } else { + None + }; + Summary::new( + pkg_id_loc(name, loc), + Vec::new(), + &BTreeMap::new(), + link, + None, + ) + .unwrap() +} + +pub fn remove_dep(sum: &Summary, ind: usize) -> Summary { + let mut deps = sum.dependencies().to_vec(); + deps.remove(ind); + // note: more things will need to be copied over in the future, but it works for now. + Summary::new(sum.package_id(), deps, &BTreeMap::new(), sum.links(), None).unwrap() +} + +pub fn dep(name: &str) -> Dependency { + dep_req(name, "*") +} + +pub fn dep_req(name: &str, req: &str) -> Dependency { + Dependency::parse(name, Some(req), registry_loc()).unwrap() +} + +pub fn dep_req_kind(name: &str, req: &str, kind: DepKind) -> Dependency { + let mut dep = dep_req(name, req); + dep.set_kind(kind); + dep +} + +pub fn dep_req_platform(name: &str, req: &str, platform: &str) -> Dependency { + let mut dep = dep_req(name, req); + dep.set_platform(Some(platform.parse().unwrap())); + dep +} + +pub fn dep_loc(name: &str, location: &str) -> Dependency { + let url = location.into_url().unwrap(); + let master = GitReference::Branch("master".to_string()); + let source_id = SourceId::for_git(&url, master).unwrap(); + Dependency::parse(name, Some("1.0.0"), source_id).unwrap() +} + +pub fn dep_kind(name: &str, kind: DepKind) -> Dependency { + let mut dep = dep(name); + dep.set_kind(kind); + dep +} + +pub fn dep_platform(name: &str, platform: &str) -> Dependency { + let mut dep = dep(name); + dep.set_platform(Some(platform.parse().unwrap())); + dep +} + +pub fn 
registry(pkgs: Vec) -> Vec { + pkgs +} + +pub fn names(names: &[P]) -> Vec { + names.iter().map(|name| name.to_pkgid()).collect() +} + +pub fn loc_names(names: &[(&'static str, &'static str)]) -> Vec { + names + .iter() + .map(|&(name, loc)| pkg_id_loc(name, loc)) + .collect() +} + +/// Assert `xs` contains `elems` +#[track_caller] +pub fn assert_contains(xs: &[A], elems: &[A]) { + for elem in elems { + assert!( + xs.contains(elem), + "missing element\nset: {xs:?}\nmissing: {elem:?}" + ); + } +} + +#[track_caller] +pub fn assert_same(a: &[A], b: &[A]) { + assert_eq!(a.len(), b.len(), "not equal\n{a:?}\n{b:?}"); + assert_contains(b, a); +} diff --git a/crates/resolver-tests/src/lib.rs b/crates/resolver-tests/src/lib.rs index 1ced997bd95..0fda8cf62c9 100644 --- a/crates/resolver-tests/src/lib.rs +++ b/crates/resolver-tests/src/lib.rs @@ -1,12 +1,15 @@ +//! > This crate is maintained by the Cargo team, primarily for use by Cargo +//! > and not intended for external use (except as a transitive dependency). This +//! > crate may make major changes to its APIs or be deprecated without warning. 
+ #![allow(clippy::print_stderr)] -use std::cell::RefCell; +pub mod helpers; +pub mod sat; + use std::cmp::{max, min}; -use std::collections::{BTreeMap, HashMap, HashSet}; +use std::collections::{BTreeMap, HashSet}; use std::fmt; -use std::fmt::Write; -use std::rc::Rc; -use std::sync::OnceLock; use std::task::Poll; use std::time::Instant; @@ -14,44 +17,64 @@ use cargo::core::dependency::DepKind; use cargo::core::resolver::{self, ResolveOpts, VersionOrdering, VersionPreferences}; use cargo::core::Resolve; use cargo::core::ResolveVersion; +use cargo::core::SourceId; use cargo::core::{Dependency, PackageId, Registry, Summary}; -use cargo::core::{GitReference, SourceId}; use cargo::sources::source::QueryKind; use cargo::sources::IndexSummary; -use cargo::util::{CargoResult, GlobalContext, IntoUrl}; -use cargo_util_schemas::manifest::RustVersion; +use cargo::util::interning::InternedString; +use cargo::util::{CargoResult, GlobalContext}; + +use crate::helpers::{dep_req, dep_req_kind, pkg_dep, pkg_id, ToPkgId}; +use crate::sat::SatResolver; use proptest::collection::{btree_map, vec}; use proptest::prelude::*; use proptest::sample::Index; use proptest::string::string_regex; -use varisat::ExtendFormula; pub fn resolve(deps: Vec, registry: &[Summary]) -> CargoResult> { - resolve_with_global_context(deps, registry, &GlobalContext::default().unwrap()) + Ok( + resolve_with_global_context(deps, registry, &GlobalContext::default().unwrap())? 
+ .into_iter() + .map(|(pkg, _)| pkg) + .collect(), + ) } pub fn resolve_and_validated( deps: Vec, registry: &[Summary], - sat_resolve: Option, -) -> CargoResult> { - let resolve = - resolve_with_global_context_raw(deps.clone(), registry, &GlobalContext::default().unwrap()); + sat_resolver: &mut SatResolver, +) -> CargoResult)>> { + resolve_and_validated_raw(deps, registry, pkg_id("root"), sat_resolver) +} + +// Verify that the resolution of cargo resolver can pass the verification of SAT +pub fn resolve_and_validated_raw( + deps: Vec, + registry: &[Summary], + root_pkg_id: PackageId, + sat_resolver: &mut SatResolver, +) -> CargoResult)>> { + let resolve = resolve_with_global_context_raw( + deps.clone(), + registry, + root_pkg_id, + &GlobalContext::default().unwrap(), + ); match resolve { Err(e) => { - let sat_resolve = sat_resolve.unwrap_or_else(|| SatResolve::new(registry)); - if sat_resolve.sat_resolve(&deps) { + if sat_resolver.sat_resolve(&deps) { panic!( - "the resolve err but the sat_resolve thinks this will work:\n{}", - sat_resolve.use_packages().unwrap() + "`resolve()` returned an error but the sat resolver thinks this will work:\n{}", + sat_resolver.used_packages().unwrap() ); } Err(e) } Ok(resolve) => { - let mut stack = vec![pkg_id("root")]; + let mut stack = vec![root_pkg_id]; let mut used = HashSet::new(); let mut links = HashSet::new(); while let Some(p) = stack.pop() { @@ -69,14 +92,13 @@ pub fn resolve_and_validated( })); } } - let out = resolve.sort(); + let out = collect_features(&resolve); assert_eq!(out.len(), used.len()); - let sat_resolve = sat_resolve.unwrap_or_else(|| SatResolve::new(registry)); - if !sat_resolve.sat_is_valid_solution(&out) { + if !sat_resolver.sat_is_valid_solution(&out) { panic!( - "the sat_resolve err but the resolve thinks this will work:\n{:?}", - resolve + "`resolve()` thinks this will work, but the solution is \ + invalid according to the sat resolver:\n{resolve:?}", ); } Ok(out) @@ -84,18 +106,27 @@ pub fn 
resolve_and_validated( } } +fn collect_features(resolve: &Resolve) -> Vec<(PackageId, Vec)> { + resolve + .sort() + .iter() + .map(|&pkg| (pkg, resolve.features(pkg).to_vec())) + .collect() +} + pub fn resolve_with_global_context( deps: Vec, registry: &[Summary], gctx: &GlobalContext, -) -> CargoResult> { - let resolve = resolve_with_global_context_raw(deps, registry, gctx)?; - Ok(resolve.sort()) +) -> CargoResult)>> { + let resolve = resolve_with_global_context_raw(deps, registry, pkg_id("root"), gctx)?; + Ok(collect_features(&resolve)) } pub fn resolve_with_global_context_raw( deps: Vec, registry: &[Summary], + root_pkg_id: PackageId, gctx: &GlobalContext, ) -> CargoResult { struct MyRegistry<'a> { @@ -157,22 +188,20 @@ pub fn resolve_with_global_context_raw( list: registry, used: HashSet::new(), }; - let summary = Summary::new( - pkg_id("root"), - deps, - &BTreeMap::new(), - None::<&String>, - None::, - ) - .unwrap(); + + let root_summary = + Summary::new(root_pkg_id, deps, &BTreeMap::new(), None::<&String>, None).unwrap(); + let opts = ResolveOpts::everything(); + let start = Instant::now(); let mut version_prefs = VersionPreferences::default(); if gctx.cli_unstable().minimal_versions { version_prefs.version_ordering(VersionOrdering::MinimumVersionsFirst) } + let resolve = resolver::resolve( - &[(summary, opts)], + &[(root_summary, opts)], &[], &mut registry, &version_prefs, @@ -180,384 +209,12 @@ pub fn resolve_with_global_context_raw( Some(gctx), ); - // The largest test in our suite takes less then 30 sec. - // So lets fail the test if we have ben running for two long. + // The largest test in our suite takes less then 30 secs. + // So let's fail the test if we have been running for more than 60 secs. 
assert!(start.elapsed().as_secs() < 60); resolve } -const fn num_bits() -> usize { - std::mem::size_of::() * 8 -} - -fn log_bits(x: usize) -> usize { - if x == 0 { - return 0; - } - assert!(x > 0); - (num_bits::() as u32 - x.leading_zeros()) as usize -} - -fn sat_at_most_one(solver: &mut impl varisat::ExtendFormula, vars: &[varisat::Var]) { - if vars.len() <= 1 { - return; - } else if vars.len() == 2 { - solver.add_clause(&[vars[0].negative(), vars[1].negative()]); - return; - } else if vars.len() == 3 { - solver.add_clause(&[vars[0].negative(), vars[1].negative()]); - solver.add_clause(&[vars[0].negative(), vars[2].negative()]); - solver.add_clause(&[vars[1].negative(), vars[2].negative()]); - return; - } - // use the "Binary Encoding" from - // https://www.it.uu.se/research/group/astra/ModRef10/papers/Alan%20M.%20Frisch%20and%20Paul%20A.%20Giannoros.%20SAT%20Encodings%20of%20the%20At-Most-k%20Constraint%20-%20ModRef%202010.pdf - let bits: Vec = solver.new_var_iter(log_bits(vars.len())).collect(); - for (i, p) in vars.iter().enumerate() { - for b in 0..bits.len() { - solver.add_clause(&[p.negative(), bits[b].lit(((1 << b) & i) > 0)]); - } - } -} - -fn sat_at_most_one_by_key( - cnf: &mut impl varisat::ExtendFormula, - data: impl Iterator, -) -> HashMap> { - // no two packages with the same links set - let mut by_keys: HashMap> = HashMap::new(); - for (p, v) in data { - by_keys.entry(p).or_default().push(v) - } - for key in by_keys.values() { - sat_at_most_one(cnf, key); - } - by_keys -} - -/// Resolution can be reduced to the SAT problem. So this is an alternative implementation -/// of the resolver that uses a SAT library for the hard work. This is intended to be easy to read, -/// as compared to the real resolver. -/// -/// For the subset of functionality that are currently made by `registry_strategy` this will, -/// find a valid resolution if one exists. 
The big thing that the real resolver does, -/// that this one does not do is work with features and optional dependencies. -/// -/// The SAT library dose not optimize for the newer version, -/// so the selected packages may not match the real resolver. -#[derive(Clone)] -pub struct SatResolve(Rc>); - -struct SatResolveInner { - solver: varisat::Solver<'static>, - var_for_is_packages_used: HashMap, - by_name: HashMap<&'static str, Vec>, -} - -impl SatResolve { - pub fn new(registry: &[Summary]) -> Self { - let mut cnf = varisat::CnfFormula::new(); - let var_for_is_packages_used: HashMap = registry - .iter() - .map(|s| (s.package_id(), cnf.new_var())) - .collect(); - - // no two packages with the same links set - sat_at_most_one_by_key( - &mut cnf, - registry - .iter() - .map(|s| (s.links(), var_for_is_packages_used[&s.package_id()])) - .filter(|(l, _)| l.is_some()), - ); - - // no two semver compatible versions of the same package - sat_at_most_one_by_key( - &mut cnf, - var_for_is_packages_used - .iter() - .map(|(p, &v)| (p.as_activations_key(), v)), - ); - - let mut by_name: HashMap<&'static str, Vec> = HashMap::new(); - - for p in registry.iter() { - by_name - .entry(p.name().as_str()) - .or_default() - .push(p.package_id()) - } - - let empty_vec = vec![]; - - // active packages need each of there `deps` to be satisfied - for p in registry.iter() { - for dep in p.dependencies() { - let mut matches: Vec = by_name - .get(dep.package_name().as_str()) - .unwrap_or(&empty_vec) - .iter() - .filter(|&p| dep.matches_id(*p)) - .map(|p| var_for_is_packages_used[&p].positive()) - .collect(); - // ^ the `dep` is satisfied or `p` is not active - matches.push(var_for_is_packages_used[&p.package_id()].negative()); - cnf.add_clause(&matches); - } - } - - let mut solver = varisat::Solver::new(); - solver.add_formula(&cnf); - - // We dont need to `solve` now. We know that "use nothing" will satisfy all the clauses so far. 
- // But things run faster if we let it spend some time figuring out how the constraints interact before we add assumptions. - solver - .solve() - .expect("docs say it can't error in default config"); - SatResolve(Rc::new(RefCell::new(SatResolveInner { - solver, - var_for_is_packages_used, - by_name, - }))) - } - pub fn sat_resolve(&self, deps: &[Dependency]) -> bool { - let mut s = self.0.borrow_mut(); - let mut assumption = vec![]; - let mut this_call = None; - - // the starting `deps` need to be satisfied - for dep in deps.iter() { - let empty_vec = vec![]; - let matches: Vec = s - .by_name - .get(dep.package_name().as_str()) - .unwrap_or(&empty_vec) - .iter() - .filter(|&p| dep.matches_id(*p)) - .map(|p| s.var_for_is_packages_used[p].positive()) - .collect(); - if matches.is_empty() { - return false; - } else if matches.len() == 1 { - assumption.extend_from_slice(&matches) - } else { - if this_call.is_none() { - let new_var = s.solver.new_var(); - this_call = Some(new_var); - assumption.push(new_var.positive()); - } - let mut matches = matches; - matches.push(this_call.unwrap().negative()); - s.solver.add_clause(&matches); - } - } - - s.solver.assume(&assumption); - - s.solver - .solve() - .expect("docs say it can't error in default config") - } - pub fn sat_is_valid_solution(&self, pids: &[PackageId]) -> bool { - let mut s = self.0.borrow_mut(); - for p in pids { - if p.name().as_str() != "root" && !s.var_for_is_packages_used.contains_key(p) { - return false; - } - } - let assumption: Vec<_> = s - .var_for_is_packages_used - .iter() - .map(|(p, v)| v.lit(pids.contains(p))) - .collect(); - - s.solver.assume(&assumption); - - s.solver - .solve() - .expect("docs say it can't error in default config") - } - fn use_packages(&self) -> Option { - self.0.borrow().solver.model().map(|lits| { - let lits: HashSet<_> = lits - .iter() - .filter(|l| l.is_positive()) - .map(|l| l.var()) - .collect(); - let mut out = String::new(); - out.push_str("used:\n"); - for (p, v) in 
self.0.borrow().var_for_is_packages_used.iter() { - if lits.contains(v) { - writeln!(&mut out, " {}", p).unwrap(); - } - } - out - }) - } -} - -pub trait ToDep { - fn to_dep(self) -> Dependency; -} - -impl ToDep for &'static str { - fn to_dep(self) -> Dependency { - Dependency::parse(self, Some("1.0.0"), registry_loc()).unwrap() - } -} - -impl ToDep for Dependency { - fn to_dep(self) -> Dependency { - self - } -} - -pub trait ToPkgId { - fn to_pkgid(&self) -> PackageId; -} - -impl ToPkgId for PackageId { - fn to_pkgid(&self) -> PackageId { - *self - } -} - -impl<'a> ToPkgId for &'a str { - fn to_pkgid(&self) -> PackageId { - PackageId::try_new(*self, "1.0.0", registry_loc()).unwrap() - } -} - -impl, U: AsRef> ToPkgId for (T, U) { - fn to_pkgid(&self) -> PackageId { - let (name, vers) = self; - PackageId::try_new(name.as_ref(), vers.as_ref(), registry_loc()).unwrap() - } -} - -#[macro_export] -macro_rules! pkg { - ($pkgid:expr => [$($deps:expr),+ $(,)* ]) => ({ - let d: Vec = vec![$($deps.to_dep()),+]; - $crate::pkg_dep($pkgid, d) - }); - - ($pkgid:expr) => ({ - $crate::pkg($pkgid) - }) -} - -fn registry_loc() -> SourceId { - static EXAMPLE_DOT_COM: OnceLock = OnceLock::new(); - let example_dot = EXAMPLE_DOT_COM.get_or_init(|| { - SourceId::for_registry(&"https://example.com".into_url().unwrap()).unwrap() - }); - *example_dot -} - -pub fn pkg(name: T) -> Summary { - pkg_dep(name, Vec::new()) -} - -pub fn pkg_dep(name: T, dep: Vec) -> Summary { - let pkgid = name.to_pkgid(); - let link = if pkgid.name().ends_with("-sys") { - Some(pkgid.name().as_str()) - } else { - None - }; - Summary::new( - name.to_pkgid(), - dep, - &BTreeMap::new(), - link, - None::, - ) - .unwrap() -} - -pub fn pkg_id(name: &str) -> PackageId { - PackageId::try_new(name, "1.0.0", registry_loc()).unwrap() -} - -fn pkg_id_loc(name: &str, loc: &str) -> PackageId { - let remote = loc.into_url(); - let master = GitReference::Branch("master".to_string()); - let source_id = 
SourceId::for_git(&remote.unwrap(), master).unwrap(); - - PackageId::try_new(name, "1.0.0", source_id).unwrap() -} - -pub fn pkg_loc(name: &str, loc: &str) -> Summary { - let link = if name.ends_with("-sys") { - Some(name) - } else { - None - }; - Summary::new( - pkg_id_loc(name, loc), - Vec::new(), - &BTreeMap::new(), - link, - None::, - ) - .unwrap() -} - -pub fn remove_dep(sum: &Summary, ind: usize) -> Summary { - let mut deps = sum.dependencies().to_vec(); - deps.remove(ind); - // note: more things will need to be copied over in the future, but it works for now. - Summary::new( - sum.package_id(), - deps, - &BTreeMap::new(), - sum.links().map(|a| a.as_str()), - None::, - ) - .unwrap() -} - -pub fn dep(name: &str) -> Dependency { - dep_req(name, "*") -} - -pub fn dep_req(name: &str, req: &str) -> Dependency { - Dependency::parse(name, Some(req), registry_loc()).unwrap() -} - -pub fn dep_req_kind(name: &str, req: &str, kind: DepKind) -> Dependency { - let mut dep = dep_req(name, req); - dep.set_kind(kind); - dep -} - -pub fn dep_loc(name: &str, location: &str) -> Dependency { - let url = location.into_url().unwrap(); - let master = GitReference::Branch("master".to_string()); - let source_id = SourceId::for_git(&url, master).unwrap(); - Dependency::parse(name, Some("1.0.0"), source_id).unwrap() -} - -pub fn dep_kind(name: &str, kind: DepKind) -> Dependency { - dep(name).set_kind(kind).clone() -} - -pub fn registry(pkgs: Vec) -> Vec { - pkgs -} - -pub fn names(names: &[P]) -> Vec { - names.iter().map(|name| name.to_pkgid()).collect() -} - -pub fn loc_names(names: &[(&'static str, &'static str)]) -> Vec { - names - .iter() - .map(|&(name, loc)| pkg_id_loc(name, loc)) - .collect() -} - /// By default `Summary` and `Dependency` have a very verbose `Debug` representation. /// This replaces with a representation that uses constructors from this file. 
/// @@ -608,43 +265,9 @@ impl fmt::Debug for PrettyPrintRegistry { } } -#[test] -fn meta_test_deep_pretty_print_registry() { - assert_eq!( - &format!( - "{:?}", - PrettyPrintRegistry(vec![ - pkg!(("foo", "1.0.1") => [dep_req("bar", "1")]), - pkg!(("foo", "1.0.0") => [dep_req("bar", "2")]), - pkg!(("foo", "2.0.0") => [dep_req("bar", "*")]), - pkg!(("bar", "1.0.0") => [dep_req("baz", "=1.0.2"), - dep_req("other", "1")]), - pkg!(("bar", "2.0.0") => [dep_req("baz", "=1.0.1")]), - pkg!(("baz", "1.0.2") => [dep_req("other", "2")]), - pkg!(("baz", "1.0.1")), - pkg!(("cat", "1.0.2") => [dep_req_kind("other", "2", DepKind::Build)]), - pkg!(("cat", "1.0.3") => [dep_req_kind("other", "2", DepKind::Development)]), - pkg!(("dep_req", "1.0.0")), - pkg!(("dep_req", "2.0.0")), - ]) - ), - "vec![pkg!((\"foo\", \"1.0.1\") => [dep_req(\"bar\", \"^1\"),]),\ - pkg!((\"foo\", \"1.0.0\") => [dep_req(\"bar\", \"^2\"),]),\ - pkg!((\"foo\", \"2.0.0\") => [dep(\"bar\"),]),\ - pkg!((\"bar\", \"1.0.0\") => [dep_req(\"baz\", \"=1.0.2\"),dep_req(\"other\", \"^1\"),]),\ - pkg!((\"bar\", \"2.0.0\") => [dep_req(\"baz\", \"=1.0.1\"),]),\ - pkg!((\"baz\", \"1.0.2\") => [dep_req(\"other\", \"^2\"),]),\ - pkg!((\"baz\", \"1.0.1\")),\ - pkg!((\"cat\", \"1.0.2\") => [dep_req_kind(\"other\", \"^2\", DepKind::Build, false),]),\ - pkg!((\"cat\", \"1.0.3\") => [dep_req_kind(\"other\", \"^2\", DepKind::Development, false),]),\ - pkg!((\"dep_req\", \"1.0.0\")),\ - pkg!((\"dep_req\", \"2.0.0\")),]" - ) -} - /// This generates a random registry index. -/// Unlike vec((Name, Ver, vec((Name, VerRq), ..), ..) -/// This strategy has a high probability of having valid dependencies +/// Unlike `vec((Name, Ver, vec((Name, VerRq), ..), ..)`, +/// this strategy has a high probability of having valid dependencies. pub fn registry_strategy( max_crates: usize, max_versions: usize, @@ -681,7 +304,7 @@ pub fn registry_strategy( vers }); - // each version of each crate can depend on each crate smaller then it. 
+ // each version of each crate can depend on each crate smaller than it. // In theory shrinkage should be 2, but in practice we get better trees with a larger value. let max_deps = max_versions * (max_crates * (max_crates - 1)) / shrinkage; @@ -774,102 +397,125 @@ pub fn registry_strategy( ) } -/// This test is to test the generator to ensure -/// that it makes registries with large dependency trees -#[test] -fn meta_test_deep_trees_from_strategy() { - use proptest::strategy::ValueTree; - use proptest::test_runner::TestRunner; - - let mut dis = [0; 21]; - - let strategy = registry_strategy(50, 20, 60); - let mut test_runner = TestRunner::deterministic(); - for _ in 0..128 { - let PrettyPrintRegistry(input) = strategy - .new_tree(&mut TestRunner::new_with_rng( - Default::default(), - test_runner.new_rng(), - )) - .unwrap() - .current(); - let reg = registry(input.clone()); - for this in input.iter().rev().take(10) { - let res = resolve( - vec![dep_req(&this.name(), &format!("={}", this.version()))], - ®, - ); - dis[res - .as_ref() - .map(|x| min(x.len(), dis.len()) - 1) - .unwrap_or(0)] += 1; - if dis.iter().all(|&x| x > 0) { - return; - } - } - } +#[cfg(test)] +mod tests { + use super::*; + use crate::helpers::registry; - panic!( - "In 1280 tries we did not see a wide enough distribution of dependency trees! 
dis: {:?}", - dis - ); -} + #[test] + fn meta_test_deep_pretty_print_registry() { + assert_eq!( + &format!( + "{:?}", + PrettyPrintRegistry(vec![ + pkg!(("foo", "1.0.1") => [dep_req("bar", "1")]), + pkg!(("foo", "1.0.0") => [dep_req("bar", "2")]), + pkg!(("foo", "2.0.0") => [dep_req("bar", "*")]), + pkg!(("bar", "1.0.0") => [dep_req("baz", "=1.0.2"), + dep_req("other", "1")]), + pkg!(("bar", "2.0.0") => [dep_req("baz", "=1.0.1")]), + pkg!(("baz", "1.0.2") => [dep_req("other", "2")]), + pkg!(("baz", "1.0.1")), + pkg!(("cat", "1.0.2") => [dep_req_kind("other", "2", DepKind::Build)]), + pkg!(("cat", "1.0.3") => [dep_req_kind("other", "2", DepKind::Development)]), + pkg!(("dep_req", "1.0.0")), + pkg!(("dep_req", "2.0.0")), + ]) + ), + "vec![pkg!((\"foo\", \"1.0.1\") => [dep_req(\"bar\", \"^1\"),]),\ + pkg!((\"foo\", \"1.0.0\") => [dep_req(\"bar\", \"^2\"),]),\ + pkg!((\"foo\", \"2.0.0\") => [dep(\"bar\"),]),\ + pkg!((\"bar\", \"1.0.0\") => [dep_req(\"baz\", \"=1.0.2\"),dep_req(\"other\", \"^1\"),]),\ + pkg!((\"bar\", \"2.0.0\") => [dep_req(\"baz\", \"=1.0.1\"),]),\ + pkg!((\"baz\", \"1.0.2\") => [dep_req(\"other\", \"^2\"),]),\ + pkg!((\"baz\", \"1.0.1\")),\ + pkg!((\"cat\", \"1.0.2\") => [dep_req_kind(\"other\", \"^2\", DepKind::Build, false),]),\ + pkg!((\"cat\", \"1.0.3\") => [dep_req_kind(\"other\", \"^2\", DepKind::Development, false),]),\ + pkg!((\"dep_req\", \"1.0.0\")),\ + pkg!((\"dep_req\", \"2.0.0\")),]" + ) + } -/// This test is to test the generator to ensure -/// that it makes registries that include multiple versions of the same library -#[test] -fn meta_test_multiple_versions_strategy() { - use proptest::strategy::ValueTree; - use proptest::test_runner::TestRunner; - - let mut dis = [0; 10]; - - let strategy = registry_strategy(50, 20, 60); - let mut test_runner = TestRunner::deterministic(); - for _ in 0..128 { - let PrettyPrintRegistry(input) = strategy - .new_tree(&mut TestRunner::new_with_rng( - Default::default(), - test_runner.new_rng(), - )) - 
.unwrap() - .current(); - let reg = registry(input.clone()); - for this in input.iter().rev().take(10) { - let res = resolve( - vec![dep_req(&this.name(), &format!("={}", this.version()))], - ®, - ); - if let Ok(mut res) = res { - let res_len = res.len(); - res.sort_by_key(|s| s.name()); - res.dedup_by_key(|s| s.name()); - dis[min(res_len - res.len(), dis.len() - 1)] += 1; - } - if dis.iter().all(|&x| x > 0) { - return; + /// This test is to test the generator to ensure + /// that it makes registries with large dependency trees + #[test] + fn meta_test_deep_trees_from_strategy() { + use proptest::strategy::ValueTree; + use proptest::test_runner::TestRunner; + + let mut dis = [0; 21]; + + let strategy = registry_strategy(50, 20, 60); + let mut test_runner = TestRunner::deterministic(); + for _ in 0..128 { + let PrettyPrintRegistry(input) = strategy + .new_tree(&mut TestRunner::new_with_rng( + Default::default(), + test_runner.new_rng(), + )) + .unwrap() + .current(); + let reg = registry(input.clone()); + for this in input.iter().rev().take(10) { + let res = resolve( + vec![dep_req(&this.name(), &format!("={}", this.version()))], + ®, + ); + dis[res + .as_ref() + .map(|x| min(x.len(), dis.len()) - 1) + .unwrap_or(0)] += 1; + if dis.iter().all(|&x| x > 0) { + return; + } } } - } - panic!( - "In 1280 tries we did not see a wide enough distribution of multiple versions of the same library! dis: {:?}", - dis - ); -} -/// Assert `xs` contains `elems` -#[track_caller] -pub fn assert_contains(xs: &[A], elems: &[A]) { - for elem in elems { - assert!( - xs.contains(elem), - "missing element\nset: {xs:?}\nmissing: {elem:?}" + panic!( + "In 1280 tries we did not see a wide enough distribution \ + of dependency trees! 
dis: {dis:?}" ); } -} -#[track_caller] -pub fn assert_same(a: &[A], b: &[A]) { - assert_eq!(a.len(), b.len(), "not equal\n{a:?}\n{b:?}"); - assert_contains(b, a); + /// This test is to test the generator to ensure + /// that it makes registries that include multiple versions of the same library + #[test] + fn meta_test_multiple_versions_strategy() { + use proptest::strategy::ValueTree; + use proptest::test_runner::TestRunner; + + let mut dis = [0; 10]; + + let strategy = registry_strategy(50, 20, 60); + let mut test_runner = TestRunner::deterministic(); + for _ in 0..128 { + let PrettyPrintRegistry(input) = strategy + .new_tree(&mut TestRunner::new_with_rng( + Default::default(), + test_runner.new_rng(), + )) + .unwrap() + .current(); + let reg = registry(input.clone()); + for this in input.iter().rev().take(10) { + let res = resolve( + vec![dep_req(&this.name(), &format!("={}", this.version()))], + ®, + ); + if let Ok(mut res) = res { + let res_len = res.len(); + res.sort_by_key(|s| s.name()); + res.dedup_by_key(|s| s.name()); + dis[min(res_len - res.len(), dis.len() - 1)] += 1; + } + if dis.iter().all(|&x| x > 0) { + return; + } + } + } + panic!( + "In 1280 tries we did not see a wide enough distribution \ + of multiple versions of the same library! 
dis: {dis:?}" + ); + } } diff --git a/crates/resolver-tests/src/sat.rs b/crates/resolver-tests/src/sat.rs new file mode 100644 index 00000000000..60546e85d1e --- /dev/null +++ b/crates/resolver-tests/src/sat.rs @@ -0,0 +1,578 @@ +use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; +use std::fmt::Write; + +use cargo::core::dependency::DepKind; +use cargo::core::{Dependency, FeatureMap, FeatureValue, PackageId, Summary}; +use cargo::util::interning::{InternedString, INTERNED_DEFAULT}; +use cargo_platform::Platform; +use varisat::ExtendFormula; + +const fn num_bits() -> usize { + std::mem::size_of::() * 8 +} + +fn log_bits(x: usize) -> usize { + if x == 0 { + return 0; + } + assert!(x > 0); + (num_bits::() as u32 - x.leading_zeros()) as usize +} + +// At this point is possible to select every version of every package. +// So we need to mark certain versions as incompatible with each other. +// We could add a clause not A, not B for all A and B that are incompatible, +fn sat_at_most_one(solver: &mut varisat::Solver<'_>, vars: &[varisat::Var]) { + if vars.len() <= 1 { + return; + } else if vars.len() == 2 { + solver.add_clause(&[vars[0].negative(), vars[1].negative()]); + return; + } else if vars.len() == 3 { + solver.add_clause(&[vars[0].negative(), vars[1].negative()]); + solver.add_clause(&[vars[0].negative(), vars[2].negative()]); + solver.add_clause(&[vars[1].negative(), vars[2].negative()]); + return; + } + // There are more efficient ways to do it for large numbers of versions. 
+ // + // Use the "Binary Encoding" from + // https://www.it.uu.se/research/group/astra/ModRef10/papers/Alan%20M.%20Frisch%20and%20Paul%20A.%20Giannoros.%20SAT%20Encodings%20of%20the%20At-Most-k%20Constraint%20-%20ModRef%202010.pdf + let bits: Vec = solver.new_var_iter(log_bits(vars.len())).collect(); + for (i, p) in vars.iter().enumerate() { + for b in 0..bits.len() { + solver.add_clause(&[p.negative(), bits[b].lit(((1 << b) & i) > 0)]); + } + } +} + +fn sat_at_most_one_by_key( + solver: &mut varisat::Solver<'_>, + data: impl Iterator, +) -> HashMap> { + // No two packages with the same keys set + let mut by_keys: HashMap> = HashMap::new(); + for (p, v) in data { + by_keys.entry(p).or_default().push(v) + } + for key in by_keys.values() { + sat_at_most_one(solver, key); + } + by_keys +} + +type DependencyVarMap<'a> = + HashMap), varisat::Var>>; + +type DependencyFeatureVarMap<'a> = HashMap< + InternedString, + HashMap<(DepKind, Option<&'a Platform>), HashMap>, +>; + +fn create_dependencies_vars<'a>( + solver: &mut varisat::Solver<'_>, + pkg_var: varisat::Var, + pkg_dependencies: &'a [Dependency], + pkg_features: &FeatureMap, +) -> (DependencyVarMap<'a>, DependencyFeatureVarMap<'a>) { + let mut var_for_is_dependencies_used = DependencyVarMap::new(); + let mut var_for_is_dependencies_features_used = DependencyFeatureVarMap::new(); + + for dep in pkg_dependencies { + let (name, kind, platform) = (dep.name_in_toml(), dep.kind(), dep.platform()); + + var_for_is_dependencies_used + .entry(name) + .or_default() + .insert((kind, platform), solver.new_var()); + + let dep_feature_var_map = dep + .features() + .iter() + .map(|&f| (f, solver.new_var())) + .collect(); + + var_for_is_dependencies_features_used + .entry(name) + .or_default() + .insert((kind, platform), dep_feature_var_map); + } + + for feature_values in pkg_features.values() { + for feature_value in feature_values { + let FeatureValue::DepFeature { + dep_name, + dep_feature, + weak: _, + } = *feature_value + else 
{ + continue; + }; + + for dep_features_vars in var_for_is_dependencies_features_used + .get_mut(&dep_name) + .expect("feature dep name exists") + .values_mut() + { + dep_features_vars.insert(dep_feature, solver.new_var()); + } + } + } + + // If a package dependency is used, then the package is used + for dep_var_map in var_for_is_dependencies_used.values() { + for dep_var in dep_var_map.values() { + solver.add_clause(&[dep_var.negative(), pkg_var.positive()]); + } + } + + // If a dependency feature is used, then the dependency is used + for (&dep_name, map) in &mut var_for_is_dependencies_features_used { + for (&(dep_kind, dep_platform), dep_feature_var_map) in map { + for dep_feature_var in dep_feature_var_map.values() { + let dep_var_map = &var_for_is_dependencies_used[&dep_name]; + let dep_var = dep_var_map[&(dep_kind, dep_platform)]; + solver.add_clause(&[dep_feature_var.negative(), dep_var.positive()]); + } + } + } + + ( + var_for_is_dependencies_used, + var_for_is_dependencies_features_used, + ) +} + +fn process_pkg_dependencies( + solver: &mut varisat::Solver<'_>, + var_for_is_dependencies_used: &DependencyVarMap<'_>, + var_for_is_dependencies_features_used: &DependencyFeatureVarMap<'_>, + pkg_var: varisat::Var, + pkg_dependencies: &[Dependency], +) { + // Add clauses for package dependencies + for dep in pkg_dependencies { + let (name, kind, platform) = (dep.name_in_toml(), dep.kind(), dep.platform()); + let dep_var_map = &var_for_is_dependencies_used[&name]; + let dep_var = dep_var_map[&(kind, platform)]; + + if !dep.is_optional() { + solver.add_clause(&[pkg_var.negative(), dep_var.positive()]); + } + + for &feature_name in dep.features() { + let dep_feature_var = + &var_for_is_dependencies_features_used[&name][&(kind, platform)][&feature_name]; + + solver.add_clause(&[dep_var.negative(), dep_feature_var.positive()]); + } + } +} + +fn process_pkg_features( + solver: &mut varisat::Solver<'_>, + var_for_is_dependencies_used: &DependencyVarMap<'_>, + 
var_for_is_dependencies_features_used: &DependencyFeatureVarMap<'_>, + pkg_feature_var_map: &HashMap, + pkg_dependencies: &[Dependency], + pkg_features: &FeatureMap, + check_dev_dependencies: bool, +) { + let optional_dependencies = pkg_dependencies + .iter() + .filter(|dep| dep.is_optional()) + .map(|dep| (dep.kind(), dep.platform(), dep.name_in_toml())) + .collect::>(); + + // Add clauses for package features + for (&feature_name, feature_values) in pkg_features { + for feature_value in feature_values { + let pkg_feature_var = pkg_feature_var_map[&feature_name]; + + match *feature_value { + FeatureValue::Feature(other_feature_name) => { + solver.add_clause(&[ + pkg_feature_var.negative(), + pkg_feature_var_map[&other_feature_name].positive(), + ]); + } + FeatureValue::Dep { dep_name } => { + // Add a clause for each dependency with the provided name (normal/build/dev with target) + for (&(dep_kind, _), &dep_var) in &var_for_is_dependencies_used[&dep_name] { + if dep_kind == DepKind::Development && !check_dev_dependencies { + continue; + } + solver.add_clause(&[pkg_feature_var.negative(), dep_var.positive()]); + } + } + FeatureValue::DepFeature { + dep_name, + dep_feature: dep_feature_name, + weak, + } => { + // Behavior of the feature: + // * if dependency `dep_name` is not optional, its feature `"dep_feature_name"` is activated. + // * if dependency `dep_name` is optional: + // - if this is a weak dependency feature: + // - feature `"dep_feature_name"` of dependency `dep_name` is activated if `dep_name` has been activated via another feature. + // - if this is not a weak dependency feature: + // - feature `dep_name` is activated if it exists. + // - dependency `dep_name` is activated. + // - feature `"dep_feature_name"` of dependency `dep_name` is activated. 
+ + // Add clauses for each dependency with the provided name (normal/build/dev with target) + let dep_var_map = &var_for_is_dependencies_used[&dep_name]; + for (&(dep_kind, dep_platform), &dep_var) in dep_var_map { + if dep_kind == DepKind::Development && !check_dev_dependencies { + continue; + } + + let dep_feature_var = &var_for_is_dependencies_features_used[&dep_name] + [&(dep_kind, dep_platform)][&dep_feature_name]; + + solver.add_clause(&[ + pkg_feature_var.negative(), + dep_var.negative(), + dep_feature_var.positive(), + ]); + + let key = (dep_kind, dep_platform, dep_name); + if !weak && optional_dependencies.contains(&key) { + solver.add_clause(&[pkg_feature_var.negative(), dep_var.positive()]); + + if let Some(other_feature_var) = pkg_feature_var_map.get(&dep_name) { + solver.add_clause(&[ + pkg_feature_var.negative(), + other_feature_var.positive(), + ]); + } + } + } + } + } + } + } +} + +fn process_compatible_dep_summaries( + solver: &mut varisat::Solver<'_>, + var_for_is_dependencies_used: &DependencyVarMap<'_>, + var_for_is_dependencies_features_used: &DependencyFeatureVarMap<'_>, + var_for_is_packages_used: &HashMap, + var_for_is_packages_features_used: &HashMap>, + by_name: &HashMap>, + pkg_dependencies: &[Dependency], + check_dev_dependencies: bool, +) { + for dep in pkg_dependencies { + if dep.kind() == DepKind::Development && !check_dev_dependencies { + continue; + } + + let (name, kind, platform) = (dep.name_in_toml(), dep.kind(), dep.platform()); + let dep_var_map = &var_for_is_dependencies_used[&name]; + let dep_var = dep_var_map[&(kind, platform)]; + + let dep_feature_var_map = &var_for_is_dependencies_features_used[&name][&(kind, platform)]; + + let compatible_summaries = by_name + .get(&dep.package_name()) + .into_iter() + .flatten() + .filter(|s| dep.matches(s)) + .filter(|s| dep.features().iter().all(|f| s.features().contains_key(f))) + .cloned() + .collect::>(); + + // At least one compatible package should be activated + let dep_clause = 
compatible_summaries + .iter() + .map(|s| var_for_is_packages_used[&s.package_id()].positive()) + .chain([dep_var.negative()]) + .collect::>(); + + solver.add_clause(&dep_clause); + + for (&feature_name, &dep_feature_var) in dep_feature_var_map { + // At least one compatible package with the additional feature should be activated + let dep_feature_clause = compatible_summaries + .iter() + .filter_map(|s| { + var_for_is_packages_features_used[&s.package_id()].get(&feature_name) + }) + .map(|var| var.positive()) + .chain([dep_feature_var.negative()]) + .collect::>(); + + solver.add_clause(&dep_feature_clause); + } + + if dep.uses_default_features() { + // For the selected package for this dependency, the `"default"` feature should be activated if it exists + let mut dep_default_clause = vec![dep_var.negative()]; + + for s in &compatible_summaries { + let s_pkg_id = s.package_id(); + let s_var = var_for_is_packages_used[&s_pkg_id]; + let s_feature_var_map = &var_for_is_packages_features_used[&s_pkg_id]; + + if let Some(s_default_feature_var) = s_feature_var_map.get(&INTERNED_DEFAULT) { + dep_default_clause + .extend_from_slice(&[s_var.negative(), s_default_feature_var.positive()]); + } else { + dep_default_clause.push(s_var.positive()); + } + } + + solver.add_clause(&dep_default_clause); + } + } +} + +/// Resolution can be reduced to the SAT problem. So this is an alternative implementation +/// of the resolver that uses a SAT library for the hard work. This is intended to be easy to read, +/// as compared to the real resolver. +/// +/// For the subset of functionality that are currently made by `registry_strategy`, +/// this will find a valid resolution if one exists. +/// +/// The SAT library does not optimize for the newer version, +/// so the selected packages may not match the real resolver. 
+pub struct SatResolver { + solver: varisat::Solver<'static>, + old_root_vars: Vec, + var_for_is_packages_used: HashMap, + var_for_is_packages_features_used: HashMap>, + by_name: HashMap>, +} + +impl SatResolver { + pub fn new<'a>(registry: impl IntoIterator) -> Self { + let check_dev_dependencies = false; + + let mut by_name: HashMap> = HashMap::new(); + for pkg in registry { + by_name.entry(pkg.name()).or_default().push(pkg.clone()) + } + + let mut solver = varisat::Solver::new(); + + // Create boolean variables for packages and packages features + let mut var_for_is_packages_used = HashMap::new(); + let mut var_for_is_packages_features_used = HashMap::<_, HashMap<_, _>>::new(); + + for pkg in by_name.values().flatten() { + let pkg_id = pkg.package_id(); + + var_for_is_packages_used.insert(pkg_id, solver.new_var()); + + var_for_is_packages_features_used.insert( + pkg_id, + (pkg.features().keys().map(|&f| (f, solver.new_var()))).collect(), + ); + } + + // If a package feature is used, then the package is used + for (&pkg_id, pkg_feature_var_map) in &var_for_is_packages_features_used { + for pkg_feature_var in pkg_feature_var_map.values() { + let pkg_var = var_for_is_packages_used[&pkg_id]; + solver.add_clause(&[pkg_feature_var.negative(), pkg_var.positive()]); + } + } + + // No two packages with the same links set + sat_at_most_one_by_key( + &mut solver, + by_name + .values() + .flatten() + .map(|s| (s.links(), var_for_is_packages_used[&s.package_id()])) + .filter(|(l, _)| l.is_some()), + ); + + // No two semver compatible versions of the same package + sat_at_most_one_by_key( + &mut solver, + var_for_is_packages_used + .iter() + .map(|(p, &v)| (p.as_activations_key(), v)), + ); + + for pkg in by_name.values().flatten() { + let pkg_id = pkg.package_id(); + let pkg_dependencies = pkg.dependencies(); + let pkg_features = pkg.features(); + let pkg_var = var_for_is_packages_used[&pkg_id]; + + // Create boolean variables for dependencies and dependencies features + let 
(var_for_is_dependencies_used, var_for_is_dependencies_features_used) = + create_dependencies_vars(&mut solver, pkg_var, pkg_dependencies, pkg_features); + + process_pkg_dependencies( + &mut solver, + &var_for_is_dependencies_used, + &var_for_is_dependencies_features_used, + pkg_var, + pkg_dependencies, + ); + + process_pkg_features( + &mut solver, + &var_for_is_dependencies_used, + &var_for_is_dependencies_features_used, + &var_for_is_packages_features_used[&pkg_id], + pkg_dependencies, + pkg_features, + check_dev_dependencies, + ); + + process_compatible_dep_summaries( + &mut solver, + &var_for_is_dependencies_used, + &var_for_is_dependencies_features_used, + &var_for_is_packages_used, + &var_for_is_packages_features_used, + &by_name, + pkg_dependencies, + check_dev_dependencies, + ); + } + + // We don't need to `solve` now. We know that "use nothing" will satisfy all the clauses so far. + // But things run faster if we let it spend some time figuring out how the constraints interact before we add assumptions. 
+ solver + .solve() + .expect("docs say it can't error in default config"); + + SatResolver { + solver, + old_root_vars: Vec::new(), + var_for_is_packages_used, + var_for_is_packages_features_used, + by_name, + } + } + + pub fn sat_resolve(&mut self, root_dependencies: &[Dependency]) -> bool { + let SatResolver { + solver, + old_root_vars, + var_for_is_packages_used, + var_for_is_packages_features_used, + by_name, + } = self; + + let root_var = solver.new_var(); + + // Create boolean variables for dependencies and dependencies features + let (var_for_is_dependencies_used, var_for_is_dependencies_features_used) = + create_dependencies_vars(solver, root_var, root_dependencies, &FeatureMap::new()); + + process_pkg_dependencies( + solver, + &var_for_is_dependencies_used, + &var_for_is_dependencies_features_used, + root_var, + root_dependencies, + ); + + process_compatible_dep_summaries( + solver, + &var_for_is_dependencies_used, + &var_for_is_dependencies_features_used, + var_for_is_packages_used, + var_for_is_packages_features_used, + by_name, + root_dependencies, + true, + ); + + // Root package is always used. + // Root vars from previous runs are deactivated. 
+ let assumption = old_root_vars + .iter() + .map(|v| v.negative()) + .chain([root_var.positive()]) + .collect::>(); + + old_root_vars.push(root_var); + + solver.assume(&assumption); + + solver + .solve() + .expect("docs say it can't error in default config") + } + + pub fn sat_is_valid_solution(&mut self, pkgs: &[(PackageId, Vec)]) -> bool { + let contains_pkg = |pkg| pkgs.iter().any(|(p, _)| p == pkg); + let contains_pkg_feature = + |pkg, f| pkgs.iter().any(|(p, flist)| p == pkg && flist.contains(f)); + + for (p, _) in pkgs { + if p.name() != "root" && !self.var_for_is_packages_used.contains_key(p) { + return false; + } + } + + // Root vars from previous runs are deactivated + let assumption = (self.old_root_vars.iter().map(|v| v.negative())) + .chain( + self.var_for_is_packages_used + .iter() + .map(|(p, v)| v.lit(contains_pkg(p))), + ) + .chain( + self.var_for_is_packages_features_used + .iter() + .flat_map(|(p, fmap)| { + fmap.iter() + .map(move |(f, v)| v.lit(contains_pkg_feature(p, f))) + }), + ) + .collect::>(); + + self.solver.assume(&assumption); + + self.solver + .solve() + .expect("docs say it can't error in default config") + } + + pub fn used_packages(&self) -> Option { + self.solver.model().map(|lits| { + let lits: HashSet<_> = lits + .iter() + .filter(|l| l.is_positive()) + .map(|l| l.var()) + .collect(); + + let mut used_packages = BTreeMap::>::new(); + for (&p, v) in self.var_for_is_packages_used.iter() { + if lits.contains(v) { + used_packages.entry(p).or_default(); + } + } + for (&p, map) in &self.var_for_is_packages_features_used { + for (&f, v) in map { + if lits.contains(v) { + used_packages + .get_mut(&p) + .expect("the feature is activated without the package being activated") + .insert(f); + } + } + } + + let mut out = String::from("used:\n"); + for (package, feature_names) in used_packages { + writeln!(&mut out, " {package}").unwrap(); + for feature_name in feature_names { + writeln!(&mut out, " + {feature_name}").unwrap(); + } + } + + 
out + }) + } +} diff --git a/crates/resolver-tests/tests/proptests.rs b/crates/resolver-tests/tests/proptests.rs new file mode 100644 index 00000000000..d1affe68900 --- /dev/null +++ b/crates/resolver-tests/tests/proptests.rs @@ -0,0 +1,273 @@ +use std::io::IsTerminal; + +use cargo::util::GlobalContext; +use cargo_util::is_ci; + +use resolver_tests::{ + helpers::{dep_req, registry, remove_dep}, + registry_strategy, resolve, resolve_and_validated, resolve_with_global_context, + sat::SatResolver, + PrettyPrintRegistry, +}; + +use proptest::prelude::*; + +// NOTE: proptest is a form of fuzz testing. It generates random input and makes sure that +// certain universal truths are upheld. Therefore, it can pass when there is a problem, +// but if it fails then there really is something wrong. When testing something as +// complicated as the resolver, the problems can be very subtle and hard to generate. +// We have had a history of these tests only failing on PRs long after a bug is introduced. +// If you have one of these test fail please report it on #6258, +// and if you did not change the resolver then feel free to retry without concern. +proptest! { + #![proptest_config(ProptestConfig { + max_shrink_iters: + if is_ci() || !std::io::stderr().is_terminal() { + // This attempts to make sure that CI will fail fast, + 0 + } else { + // but that local builds will give a small clear test case. + u32::MAX + }, + result_cache: prop::test_runner::basic_result_cache, + .. ProptestConfig::default() + })] + + /// NOTE: if you think this test has failed spuriously see the note at the top of this macro. + #[test] + fn prop_passes_validation( + PrettyPrintRegistry(input) in registry_strategy(50, 20, 60) + ) { + let reg = registry(input.clone()); + let mut sat_resolver = SatResolver::new(®); + + // There is only a small chance that a crate will be interesting. + // So we try some of the most complicated. 
+ for this in input.iter().rev().take(20) { + let _ = resolve_and_validated( + vec![dep_req(&this.name(), &format!("={}", this.version()))], + ®, + &mut sat_resolver, + ); + } + } + + /// NOTE: if you think this test has failed spuriously see the note at the top of this macro. + #[test] + fn prop_minimum_version_errors_the_same( + PrettyPrintRegistry(input) in registry_strategy(50, 20, 60) + ) { + let mut gctx = GlobalContext::default().unwrap(); + gctx.nightly_features_allowed = true; + gctx + .configure( + 1, + false, + None, + false, + false, + false, + &None, + &["minimal-versions".to_string()], + &[], + ) + .unwrap(); + + let reg = registry(input.clone()); + + // There is only a small chance that a crate will be interesting. + // So we try some of the most complicated. + for this in input.iter().rev().take(10) { + let deps = vec![dep_req(&this.name(), &format!("={}", this.version()))]; + let res = resolve(deps.clone(), ®); + let mres = resolve_with_global_context(deps, ®, &gctx); + + // `minimal-versions` changes what order the candidates are tried but not the existence of a solution. + prop_assert_eq!( + res.is_ok(), + mres.is_ok(), + "minimal-versions and regular resolver disagree about whether `{} = \"={}\"` can resolve", + this.name(), + this.version() + ) + } + } + + /// NOTE: if you think this test has failed spuriously see the note at the top of this macro. + #[test] + fn prop_direct_minimum_version_error_implications( + PrettyPrintRegistry(input) in registry_strategy(50, 20, 60) + ) { + let mut gctx = GlobalContext::default().unwrap(); + gctx.nightly_features_allowed = true; + gctx + .configure( + 1, + false, + None, + false, + false, + false, + &None, + &["direct-minimal-versions".to_string()], + &[], + ) + .unwrap(); + + let reg = registry(input.clone()); + + // There is only a small chance that a crate will be interesting. + // So we try some of the most complicated. 
+ for this in input.iter().rev().take(10) { + let deps = vec![dep_req(&this.name(), &format!("={}", this.version()))]; + let res = resolve(deps.clone(), ®); + let mres = resolve_with_global_context(deps, ®, &gctx); + + // `direct-minimal-versions` reduces the number of available solutions, + // so we verify that we do not come up with solutions not seen in `maximal-versions`. + if res.is_err() { + prop_assert!( + mres.is_err(), + "direct-minimal-versions should not have more solutions than the regular, maximal resolver but found one when resolving `{} = \"={}\"`", + this.name(), + this.version() + ) + } + if mres.is_ok() { + prop_assert!( + res.is_ok(), + "direct-minimal-versions should not have more solutions than the regular, maximal resolver but found one when resolving `{} = \"={}\"`", + this.name(), + this.version() + ) + } + } + } + + /// NOTE: if you think this test has failed spuriously see the note at the top of this macro. + #[test] + fn prop_removing_a_dep_cant_break( + PrettyPrintRegistry(input) in registry_strategy(50, 20, 60), + indexes_to_remove in prop::collection::vec((any::(), any::()), ..10) + ) { + let reg = registry(input.clone()); + let mut removed_input = input.clone(); + for (summary_idx, dep_idx) in indexes_to_remove { + if !removed_input.is_empty() { + let summary_idx = summary_idx.index(removed_input.len()); + let deps = removed_input[summary_idx].dependencies(); + if !deps.is_empty() { + let new = remove_dep(&removed_input[summary_idx], dep_idx.index(deps.len())); + removed_input[summary_idx] = new; + } + } + } + let removed_reg = registry(removed_input); + + // There is only a small chance that a crate will be interesting. + // So we try some of the most complicated. 
+ for this in input.iter().rev().take(10) { + if resolve( + vec![dep_req(&this.name(), &format!("={}", this.version()))], + ®, + ).is_ok() { + prop_assert!( + resolve( + vec![dep_req(&this.name(), &format!("={}", this.version()))], + &removed_reg, + ).is_ok(), + "full index worked for `{} = \"={}\"` but removing some deps broke it!", + this.name(), + this.version(), + ) + } + } + } + + /// NOTE: if you think this test has failed spuriously see the note at the top of this macro. + #[test] + fn prop_limited_independence_of_irrelevant_alternatives( + PrettyPrintRegistry(input) in registry_strategy(50, 20, 60), + indexes_to_unpublish in prop::collection::vec(any::(), ..10) + ) { + let reg = registry(input.clone()); + + // There is only a small chance that a crate will be interesting. + // So we try some of the most complicated. + for this in input.iter().rev().take(10) { + let res = resolve( + vec![dep_req(&this.name(), &format!("={}", this.version()))], + ®, + ); + + match res { + Ok(r) => { + // If resolution was successful, then unpublishing a version of a crate + // that was not selected should not change that. + let not_selected: Vec<_> = input + .iter() + .cloned() + .filter(|x| !r.contains(&x.package_id())) + .collect(); + + if !not_selected.is_empty() { + let indexes_to_unpublish: Vec<_> = indexes_to_unpublish.iter().map(|x| x.get(¬_selected)).collect(); + + let new_reg = registry( + input + .iter() + .cloned() + .filter(|x| !indexes_to_unpublish.contains(&x)) + .collect(), + ); + + let res = resolve( + vec![dep_req(&this.name(), &format!("={}", this.version()))], + &new_reg, + ); + + // Note: that we can not assert that the two `res` are identical + // as the resolver does depend on irrelevant alternatives. + // It uses how constrained a dependency requirement is + // to determine what order to evaluate requirements. 
+ + prop_assert!( + res.is_ok(), + "unpublishing {:?} stopped `{} = \"={}\"` from working", + indexes_to_unpublish.iter().map(|x| x.package_id()).collect::>(), + this.name(), + this.version() + ) + } + } + + Err(_) => { + // If resolution was unsuccessful, then it should stay unsuccessful + // even if any version of a crate is unpublished. + let indexes_to_unpublish: Vec<_> = indexes_to_unpublish.iter().map(|x| x.get(&input)).collect(); + + let new_reg = registry( + input + .iter() + .cloned() + .filter(|x| !indexes_to_unpublish.contains(&x)) + .collect(), + ); + + let res = resolve( + vec![dep_req(&this.name(), &format!("={}", this.version()))], + &new_reg, + ); + + prop_assert!( + res.is_err(), + "full index did not work for `{} = \"={}\"` but unpublishing {:?} fixed it!", + this.name(), + this.version(), + indexes_to_unpublish.iter().map(|x| x.package_id()).collect::>(), + ) + } + } + } + } +} diff --git a/crates/resolver-tests/tests/pubgrub.rs b/crates/resolver-tests/tests/pubgrub.rs new file mode 100644 index 00000000000..fec5a8757bc --- /dev/null +++ b/crates/resolver-tests/tests/pubgrub.rs @@ -0,0 +1,452 @@ +use cargo::core::{dependency::DepKind, Dependency}; + +use resolver_tests::{ + helpers::{ + dep, dep_kind, dep_req, dep_req_kind, pkg, pkg_dep, pkg_dep_link, pkg_dep_with, + pkg_id_source, registry, ToDep, + }, + pkg, resolve, resolve_and_validated, resolve_and_validated_raw, + sat::SatResolver, +}; + +#[test] +fn test_01_renamed_package() { + let reg = registry(vec![ + pkg_dep_with( + "a", + vec!["b".opt().rename("b_package")], + &[("default", &["b_package"])], + ), + pkg("b"), + ]); + + let deps = vec!["a".with(&["default"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_ok()); +} + +#[test] +fn test_02_renamed_package_no_shadowing() { + let reg = registry(vec![ + pkg("url"), + pkg_dep("wasmi", vec!["wasmparser-nostd".rename("wasmparser")]), + pkg_dep("wasmparser", vec!["url".to_dep()]), + 
]); + + let deps = vec![dep("wasmi")]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_err()); +} + +#[test] +fn test_03_prerelease_semver() { + let reg = registry(vec![ + pkg!("parking_lot_core" => [dep_req("smallvec", "^1.6.1")]), + pkg(("smallvec", "2.0.0-alpha.3")), + pkg_dep_with( + ("tokio", "1.35.1"), + vec!["parking_lot_core".opt()], + &[("default", &["parking_lot_core"])], + ), + ]); + + let deps = vec!["tokio".with(&["default"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_err()); +} + +#[test] +fn test_04_cyclic_features() { + let reg = registry(vec![pkg_dep_with( + "windows", + vec![], + &[ + ("Win32", &["Win32_Foundation"]), + ("Win32_Foundation", &["Win32"]), + ], + )]); + + let deps = vec!["windows".with(&["Win32_Foundation"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_ok()); +} + +#[test] +fn test_05_cyclic_optional_dependencies() { + let reg = registry(vec![ + pkg_dep("async-global-executor", vec!["io-lifetimes".opt()]), + pkg_dep( + "io-lifetimes", + vec!["test".with(&["async-global-executor"])], + ), + pkg_dep_with( + "test", + vec!["async-global-executor".opt().with(&["io-lifetimes"])], + &[], + ), + ]); + + let deps = vec![dep("test")]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_ok()); +} + +#[test] +fn test_06_cyclic_dependencies() { + let reg = registry(vec![ + pkg(("a", "1.0.0")), + pkg_dep(("a", "1.0.1"), vec![dep("dep")]), + pkg_dep("dep", vec![dep("a")]), + ]); + + let deps = vec![dep("dep")]; + + // Cyclic dependencies are not checked in the SAT resolver + assert!(resolve(deps.clone(), ®).is_err()); + assert!(SatResolver::new(®).sat_resolve(&deps)); +} + +#[test] +fn test_07_self_dependency() { + let reg = registry(vec![pkg_dep("dep", vec![dep("dep")])]); + + let deps = 
vec![dep("dep")]; + + // Cyclic dependencies are not checked in the SAT resolver + assert!(resolve(deps.clone(), ®).is_err()); + assert!(SatResolver::new(®).sat_resolve(&deps)); +} + +#[test] +fn test_08_activated_optional_self_dependency() { + let reg = registry(vec![pkg_dep("a", vec!["a".opt()])]); + + let deps = vec!["a".with(&["a"])]; + + // Cyclic dependencies are not checked in the SAT resolver + assert!(resolve(deps.clone(), ®).is_err()); + assert!(SatResolver::new(®).sat_resolve(&deps)); +} + +#[test] +fn test_09_build_dependency_with_same_name() { + let reg = registry(vec![ + pkg("memchr"), + pkg_dep_with( + ("regex", "1.4.6"), + vec!["memchr".opt()], + &[("default", &["memchr"])], + ), + pkg_dep("sv-parser", vec!["regex".with(&["default"])]), + pkg_dep( + "svlint", + vec![ + dep_req("regex", "^1.5"), + dep_req_kind("regex", "^1", DepKind::Build), + dep("sv-parser"), + ], + ), + ]); + + let deps = vec![dep("svlint")]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_err()); +} + +#[test] +fn test_10_root_dev_dependency_with_same_name() { + let reg = registry(vec![pkg(("root", "1.0.1"))]); + + let deps = vec![dep_req_kind("root", "=1.0.1", DepKind::Development).rename("root101")]; + let source = pkg_id_source("root", "https://root.example.com"); + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated_raw(deps, ®, source, &mut sat_resolver).is_ok()); +} + +#[test] +fn test_11_dev_dependency() { + let reg = registry(vec![pkg_dep_with( + "burn-core", + vec![dep_kind("burn-ndarray", DepKind::Development)], + &[("default", &["burn-ndarray/std"])], + )]); + + let deps = vec!["burn-core".with(&["default"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_ok()); +} + +#[test] +fn test_12_weak_dependencies() { + let reg = registry(vec![ + pkg_dep_with("borsh", vec!["borsh-derive".opt()], &[("std", &[])]), + pkg_dep_with( + 
"rust_decimal", + vec!["borsh".opt().with(&["borsh-derive"])], + &[("default", &["borsh?/std"])], + ), + ]); + + let deps = vec!["rust_decimal".with(&["default"])]; + + // Weak dependencies are not supported yet in the dependency resolver + assert!(resolve(deps.clone(), ®).is_err()); + assert!(SatResolver::new(®).sat_resolve(&deps)); +} + +#[test] +fn test_13_weak_dependencies() { + let reg = registry(vec![ + pkg_dep_with("memchr", vec!["std".opt()], &[("std", &["dep:std"])]), + pkg_dep_with( + "winnow", + vec!["memchr".opt()], + &[("default", &["memchr?/std"]), ("simd", &["dep:memchr"])], + ), + ]); + + let deps = vec!["winnow".with(&["default"])]; + + // Weak dependencies are not supported yet in the dependency resolver + assert!(resolve(deps.clone(), ®).is_err()); + assert!(SatResolver::new(®).sat_resolve(&deps)); +} + +#[test] +fn test_14_weak_dependencies() { + let reg = registry(vec![ + pkg_dep("a", vec![dep("bad")]), + pkg_dep_with("b", vec!["a".opt()], &[("perf-literal", &["dep:a"])]), + pkg_dep_with( + "c", + vec!["b".opt()], + &[ + ("perf-literal", &["b?/perf-literal"]), + ("perf-literal-multisubstring", &["dep:b"]), + ], + ), + pkg_dep_with("dep", vec![dep("c")], &[("default", &["c/perf-literal"])]), + ]); + + let deps = vec!["dep".with(&["default"])]; + + // Weak dependencies are not supported yet in the dependency resolver + assert!(resolve(deps.clone(), ®).is_err()); + assert!(SatResolver::new(®).sat_resolve(&deps)); +} + +#[test] +fn test_15_duplicate_sys_crate() { + let reg = registry(vec![ + pkg_dep_link("js", "js", vec![]), + pkg_dep_link("dep", "js", vec![dep("js")]), + ]); + + let deps = vec![dep("dep")]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_err()); +} + +#[test] +fn test_16_missing_optional_dependency() { + let reg = registry(vec![ + pkg_dep("b", vec!["c".opt()]), + pkg_dep_with("dep", vec![dep("b")], &[("d", &["b/c"])]), + ]); + + let deps = vec!["dep".with(&["d"])]; + let 
mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_err()); +} + +#[test] +fn test_17_feature_shadowing_missing_optional_dependency() { + let reg = registry(vec![pkg_dep_with( + "rustix", + vec!["alloc".opt()], + &[ + ("alloc", &[]), + ("default", &["alloc"]), + ("rustc-dep-of-std", &["dep:alloc"]), + ], + )]); + + let deps = vec!["rustix".with(&["default"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_ok()); +} + +#[test] +fn test_18_feature_shadowing_activated_optional_dependency() { + let reg = registry(vec![ + pkg_dep("alloc", vec![dep("bad")]), + pkg_dep_with( + "rustix", + vec!["alloc".opt()], + &[ + ("alloc", &[]), + ("default", &["dep:alloc"]), + ("rustc-dep-of-std", &["alloc"]), + ], + ), + ]); + + let deps = vec!["rustix".with(&["default"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_err()); +} + +#[test] +fn test_19_same_dep_twice_feature_unification() { + let reg = registry(vec![ + pkg_dep_with( + "iced", + vec!["iced_wgpu".opt(), "iced_wgpu".opt().with(&["webgl"])], + &[("default", &["iced_wgpu"])], + ), + pkg_dep_with("iced_wgpu", vec![], &[("webgl", &[])]), + ]); + + let deps = vec!["iced".with(&["default"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_ok()); +} + +#[test] +fn test_20_no_implicit_feature() { + let reg = registry(vec![ + pkg("c"), + pkg_dep_with("ureq", vec!["c".opt()], &[("cookies", &["dep:c"])]), + pkg_dep_with("dep", vec![dep("ureq")], &[("cookies", &["ureq/c"])]), + ]); + + let deps = vec!["dep".with(&["cookies"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_err()); +} + +#[test] +fn test_21_implicit_feature() { + let reg = registry(vec![ + pkg("c"), + pkg_dep("ureq", vec!["c".opt()]), + pkg_dep_with("dep", 
vec![dep("ureq")], &[("cookies", &["ureq/c"])]), + ]); + + let deps = vec!["dep".with(&["cookies"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_ok()); +} + +#[test] +fn test_22_missing_explicit_default_feature() { + let reg = registry(vec![ + pkg_dep_with( + "fuel-tx", + vec![dep("serde"), "serde_json".opt()], + &[("default", &["serde/default"]), ("serde", &["serde_json"])], + ), + pkg("serde"), + ]); + + let deps = vec!["fuel-tx".with(&["default"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_err()); +} + +#[test] +fn test_23_no_need_for_explicit_default_feature() { + let reg = registry(vec![ + pkg("a"), + pkg_dep_with( + "b", + vec!["a".with_default()], + &[("default", &["std"]), ("std", &[])], + ), + ]); + + let deps = vec!["b".with(&["default"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_ok()); +} + +#[test] +fn test_24_dep_feature() { + let reg = registry(vec![ + pkg_dep_with("proc-macro2", vec![], &[("proc-macro", &[])]), + pkg_dep_with( + "syn", + vec![dep("proc-macro2")], + &[("proc-macro", &["proc-macro2/proc-macro"])], + ), + pkg_dep("serde_derive", vec!["syn".with(&["proc-macro"])]), + ]); + + let deps = vec![dep("serde_derive")]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_ok()); +} + +#[test] +fn test_25_dep_feature() { + let reg = registry(vec![ + pkg_dep_with("proc-macro2", vec![], &[("proc-macro", &[])]), + pkg_dep_with( + "syn", + vec![dep("proc-macro2")], + &[("proc-macro", &["proc-macro2/proc-macro"])], + ), + ]); + + let deps = vec!["syn".with(&["proc-macro"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_ok()); +} + +#[test] +fn test_26_implicit_feature_with_dep_feature() { + let reg = registry(vec![ + 
pkg_dep_with("quote", vec![], &[("proc-macro", &[])]), + pkg_dep_with( + "syn", + vec!["quote".opt()], + &[("default", &["quote", "quote/proc-macro"])], + ), + ]); + + let deps = vec!["syn".with(&["default"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_ok()); +} + +#[test] +fn test_27_dep_feature_activating_shadowing_feature() { + let reg = registry(vec![ + pkg_dep_with( + "a", + vec!["b".opt(), "x".opt()], + &[("b", &["x"]), ("default", &["b/native"])], + ), + pkg_dep_with("b", vec![], &[("native", &[])]), + ]); + + let deps = vec!["a".with(&["default"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_err()); +} + +#[test] +fn test_28_dep_feature_not_activating_shadowing_feature() { + let reg = registry(vec![ + pkg_dep_with( + "fuel-tx", + vec![dep("serde"), "serde_json".opt()], + &[("default", &["serde/default"]), ("serde", &["serde_json"])], + ), + pkg_dep_with("serde", vec![], &[("default", &[])]), + ]); + + let deps = vec!["fuel-tx".with(&["default"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_ok()); +} diff --git a/crates/resolver-tests/tests/resolve.rs b/crates/resolver-tests/tests/resolve.rs index 2728660b2bf..be358459a07 100644 --- a/crates/resolver-tests/tests/resolve.rs +++ b/crates/resolver-tests/tests/resolve.rs @@ -1,297 +1,20 @@ -use std::io::IsTerminal; - use cargo::core::dependency::DepKind; use cargo::core::Dependency; use cargo::util::GlobalContext; -use cargo_util::is_ci; use resolver_tests::{ - assert_contains, assert_same, dep, dep_kind, dep_loc, dep_req, loc_names, names, pkg, pkg_id, - pkg_loc, registry, registry_strategy, remove_dep, resolve, resolve_and_validated, - resolve_with_global_context, PrettyPrintRegistry, SatResolve, ToDep, ToPkgId, + helpers::{ + assert_contains, assert_same, dep, dep_kind, dep_loc, dep_req, loc_names, names, pkg_id, 
+ pkg_loc, registry, ToPkgId, + }, + pkg, resolve, resolve_with_global_context, }; -use proptest::prelude::*; - -// NOTE: proptest is a form of fuzz testing. It generates random input and makes sure that -// certain universal truths are upheld. Therefore, it can pass when there is a problem, -// but if it fails then there really is something wrong. When testing something as -// complicated as the resolver, the problems can be very subtle and hard to generate. -// We have had a history of these tests only failing on PRs long after a bug is introduced. -// If you have one of these test fail please report it on #6258, -// and if you did not change the resolver then feel free to retry without concern. -proptest! { - #![proptest_config(ProptestConfig { - max_shrink_iters: - if is_ci() || !std::io::stderr().is_terminal() { - // This attempts to make sure that CI will fail fast, - 0 - } else { - // but that local builds will give a small clear test case. - u32::MAX - }, - result_cache: prop::test_runner::basic_result_cache, - .. ProptestConfig::default() - })] - - /// NOTE: if you think this test has failed spuriously see the note at the top of this macro. - #[test] - fn prop_passes_validation( - PrettyPrintRegistry(input) in registry_strategy(50, 20, 60) - ) { - let reg = registry(input.clone()); - let sat_resolve = SatResolve::new(®); - // there is only a small chance that any one - // crate will be interesting. - // So we try some of the most complicated. - for this in input.iter().rev().take(20) { - let _ = resolve_and_validated( - vec![dep_req(&this.name(), &format!("={}", this.version()))], - ®, - Some(sat_resolve.clone()), - ); - } - } - - /// NOTE: if you think this test has failed spuriously see the note at the top of this macro. 
- #[test] - fn prop_minimum_version_errors_the_same( - PrettyPrintRegistry(input) in registry_strategy(50, 20, 60) - ) { - let mut gctx = GlobalContext::default().unwrap(); - gctx.nightly_features_allowed = true; - gctx - .configure( - 1, - false, - None, - false, - false, - false, - &None, - &["minimal-versions".to_string()], - &[], - ) - .unwrap(); - - let reg = registry(input.clone()); - // there is only a small chance that any one - // crate will be interesting. - // So we try some of the most complicated. - for this in input.iter().rev().take(10) { - // minimal-versions change what order the candidates - // are tried but not the existence of a solution - let res = resolve( - vec![dep_req(&this.name(), &format!("={}", this.version()))], - ®, - ); - - let mres = resolve_with_global_context( - vec![dep_req(&this.name(), &format!("={}", this.version()))], - ®, - &gctx, - ); - - prop_assert_eq!( - res.is_ok(), - mres.is_ok(), - "minimal-versions and regular resolver disagree about whether `{} = \"={}\"` can resolve", - this.name(), - this.version() - ) - } - } - - /// NOTE: if you think this test has failed spuriously see the note at the top of this macro. - #[test] - fn prop_direct_minimum_version_error_implications( - PrettyPrintRegistry(input) in registry_strategy(50, 20, 60) - ) { - let mut gctx = GlobalContext::default().unwrap(); - gctx.nightly_features_allowed = true; - gctx - .configure( - 1, - false, - None, - false, - false, - false, - &None, - &["direct-minimal-versions".to_string()], - &[], - ) - .unwrap(); - - let reg = registry(input.clone()); - // there is only a small chance that any one - // crate will be interesting. - // So we try some of the most complicated. 
- for this in input.iter().rev().take(10) { - // direct-minimal-versions reduces the number of available solutions, so we verify that - // we do not come up with solutions maximal versions does not - let res = resolve( - vec![dep_req(&this.name(), &format!("={}", this.version()))], - ®, - ); - - let mres = resolve_with_global_context( - vec![dep_req(&this.name(), &format!("={}", this.version()))], - ®, - &gctx, - ); - - if res.is_err() { - prop_assert!( - mres.is_err(), - "direct-minimal-versions should not have more solutions than the regular, maximal resolver but found one when resolving `{} = \"={}\"`", - this.name(), - this.version() - ) - } - if mres.is_ok() { - prop_assert!( - res.is_ok(), - "direct-minimal-versions should not have more solutions than the regular, maximal resolver but found one when resolving `{} = \"={}\"`", - this.name(), - this.version() - ) - } - } - } - - /// NOTE: if you think this test has failed spuriously see the note at the top of this macro. - #[test] - fn prop_removing_a_dep_cant_break( - PrettyPrintRegistry(input) in registry_strategy(50, 20, 60), - indexes_to_remove in prop::collection::vec((any::(), any::()), ..10) - ) { - let reg = registry(input.clone()); - let mut removed_input = input.clone(); - for (summary_idx, dep_idx) in indexes_to_remove { - if !removed_input.is_empty() { - let summary_idx = summary_idx.index(removed_input.len()); - let deps = removed_input[summary_idx].dependencies(); - if !deps.is_empty() { - let new = remove_dep(&removed_input[summary_idx], dep_idx.index(deps.len())); - removed_input[summary_idx] = new; - } - } - } - let removed_reg = registry(removed_input); - // there is only a small chance that any one - // crate will be interesting. - // So we try some of the most complicated. 
- for this in input.iter().rev().take(10) { - if resolve( - vec![dep_req(&this.name(), &format!("={}", this.version()))], - ®, - ).is_ok() { - prop_assert!( - resolve( - vec![dep_req(&this.name(), &format!("={}", this.version()))], - &removed_reg, - ).is_ok(), - "full index worked for `{} = \"={}\"` but removing some deps broke it!", - this.name(), - this.version(), - ) - } - } - } - - /// NOTE: if you think this test has failed spuriously see the note at the top of this macro. - #[test] - fn prop_limited_independence_of_irrelevant_alternatives( - PrettyPrintRegistry(input) in registry_strategy(50, 20, 60), - indexes_to_unpublish in prop::collection::vec(any::(), ..10) - ) { - let reg = registry(input.clone()); - // there is only a small chance that any one - // crate will be interesting. - // So we try some of the most complicated. - for this in input.iter().rev().take(10) { - let res = resolve( - vec![dep_req(&this.name(), &format!("={}", this.version()))], - ®, - ); - - match res { - Ok(r) => { - // If resolution was successful, then unpublishing a version of a crate - // that was not selected should not change that. - let not_selected: Vec<_> = input - .iter() - .cloned() - .filter(|x| !r.contains(&x.package_id())) - .collect(); - if !not_selected.is_empty() { - let indexes_to_unpublish: Vec<_> = indexes_to_unpublish.iter().map(|x| x.get(¬_selected)).collect(); - - let new_reg = registry( - input - .iter() - .cloned() - .filter(|x| !indexes_to_unpublish.contains(&x)) - .collect(), - ); - - let res = resolve( - vec![dep_req(&this.name(), &format!("={}", this.version()))], - &new_reg, - ); - - // Note: that we can not assert that the two `res` are identical - // as the resolver does depend on irrelevant alternatives. - // It uses how constrained a dependency requirement is - // to determine what order to evaluate requirements. 
- - prop_assert!( - res.is_ok(), - "unpublishing {:?} stopped `{} = \"={}\"` from working", - indexes_to_unpublish.iter().map(|x| x.package_id()).collect::>(), - this.name(), - this.version() - ) - } - } - - Err(_) => { - // If resolution was unsuccessful, then it should stay unsuccessful - // even if any version of a crate is unpublished. - let indexes_to_unpublish: Vec<_> = indexes_to_unpublish.iter().map(|x| x.get(&input)).collect(); - - let new_reg = registry( - input - .iter() - .cloned() - .filter(|x| !indexes_to_unpublish.contains(&x)) - .collect(), - ); - - let res = resolve( - vec![dep_req(&this.name(), &format!("={}", this.version()))], - &new_reg, - ); - - prop_assert!( - res.is_err(), - "full index did not work for `{} = \"={}\"` but unpublishing {:?} fixed it!", - this.name(), - this.version(), - indexes_to_unpublish.iter().map(|x| x.package_id()).collect::>(), - ) - } - } - } - } -} - #[test] #[should_panic(expected = "assertion failed: !name.is_empty()")] fn test_dependency_with_empty_name() { // Bug 5229, dependency-names must not be empty - "".to_dep(); + dep(""); } #[test] @@ -460,7 +183,10 @@ fn test_resolving_minimum_version_with_transitive_deps() { ®, &gctx, ) - .unwrap(); + .unwrap() + .into_iter() + .map(|(pkg, _)| pkg) + .collect::>(); assert_contains( &res, @@ -659,8 +385,8 @@ fn resolving_with_constrained_sibling_backtrack_parent() { let vsn = format!("1.0.{}", i); reglist.push( pkg!(("bar", vsn.clone()) => [dep_req("backtrack_trap1", "1.0.2"), - dep_req("backtrack_trap2", "1.0.2"), - dep_req("constrained", "1.0.1")]), + dep_req("backtrack_trap2", "1.0.2"), + dep_req("constrained", "1.0.1")]), ); reglist.push(pkg!(("backtrack_trap1", vsn.clone()))); reglist.push(pkg!(("backtrack_trap2", vsn.clone()))); @@ -1211,118 +937,6 @@ fn large_conflict_cache() { let _ = resolve(root_deps, ®); } -#[test] -fn off_by_one_bug() { - let input = vec![ - pkg!(("A-sys", "0.0.1")), - pkg!(("A-sys", "0.0.4")), - pkg!(("A-sys", "0.0.6")), - pkg!(("A-sys", 
"0.0.7")), - pkg!(("NA", "0.0.0") => [dep_req("A-sys", "<= 0.0.5"),]), - pkg!(("NA", "0.0.1") => [dep_req("A-sys", ">= 0.0.6, <= 0.0.8"),]), - pkg!(("a", "0.0.1")), - pkg!(("a", "0.0.2")), - pkg!(("aa", "0.0.0") => [dep_req("A-sys", ">= 0.0.4, <= 0.0.6"),dep_req("NA", "<= 0.0.0"),]), - pkg!(("f", "0.0.3") => [dep("NA"),dep_req("a", "<= 0.0.2"),dep("aa"),]), - ]; - - let reg = registry(input); - let _ = resolve_and_validated(vec![dep("f")], ®, None); -} - -#[test] -fn conflict_store_bug() { - let input = vec![ - pkg!(("A", "0.0.3")), - pkg!(("A", "0.0.5")), - pkg!(("A", "0.0.9") => [dep("bad"),]), - pkg!(("A", "0.0.10") => [dep("bad"),]), - pkg!(("L-sys", "0.0.1") => [dep("bad"),]), - pkg!(("L-sys", "0.0.5")), - pkg!(("R", "0.0.4") => [ - dep_req("L-sys", "= 0.0.5"), - ]), - pkg!(("R", "0.0.6")), - pkg!(("a-sys", "0.0.5")), - pkg!(("a-sys", "0.0.11")), - pkg!(("c", "0.0.12") => [ - dep_req("R", ">= 0.0.3, <= 0.0.4"), - ]), - pkg!(("c", "0.0.13") => [ - dep_req("a-sys", ">= 0.0.8, <= 0.0.11"), - ]), - pkg!(("c0", "0.0.6") => [ - dep_req("L-sys", "<= 0.0.2"), - ]), - pkg!(("c0", "0.0.10") => [ - dep_req("A", ">= 0.0.9, <= 0.0.10"), - dep_req("a-sys", "= 0.0.5"), - ]), - pkg!("j" => [ - dep_req("A", ">= 0.0.3, <= 0.0.5"), - dep_req("R", ">=0.0.4, <= 0.0.6"), - dep_req("c", ">= 0.0.9"), - dep_req("c0", ">= 0.0.6"), - ]), - ]; - - let reg = registry(input); - let _ = resolve_and_validated(vec![dep("j")], ®, None); -} - -#[test] -fn conflict_store_more_then_one_match() { - let input = vec![ - pkg!(("A", "0.0.0")), - pkg!(("A", "0.0.1")), - pkg!(("A-sys", "0.0.0")), - pkg!(("A-sys", "0.0.1")), - pkg!(("A-sys", "0.0.2")), - pkg!(("A-sys", "0.0.3")), - pkg!(("A-sys", "0.0.12")), - pkg!(("A-sys", "0.0.16")), - pkg!(("B-sys", "0.0.0")), - pkg!(("B-sys", "0.0.1")), - pkg!(("B-sys", "0.0.2") => [dep_req("A-sys", "= 0.0.12"),]), - pkg!(("BA-sys", "0.0.0") => [dep_req("A-sys","= 0.0.16"),]), - pkg!(("BA-sys", "0.0.1") => [dep("bad"),]), - pkg!(("BA-sys", "0.0.2") => 
[dep("bad"),]), - pkg!("nA" => [ - dep("A"), - dep_req("A-sys", "<= 0.0.3"), - dep("B-sys"), - dep("BA-sys"), - ]), - ]; - let reg = registry(input); - let _ = resolve_and_validated(vec![dep("nA")], ®, None); -} - -#[test] -fn bad_lockfile_from_8249() { - let input = vec![ - pkg!(("a-sys", "0.2.0")), - pkg!(("a-sys", "0.1.0")), - pkg!(("b", "0.1.0") => [ - dep_req("a-sys", "0.1"), // should be optional: true, but not deeded for now - ]), - pkg!(("c", "1.0.0") => [ - dep_req("b", "=0.1.0"), - ]), - pkg!("foo" => [ - dep_req("a-sys", "=0.2.0"), - { - let mut b = dep_req("b", "=0.1.0"); - b.set_features(vec!["a-sys"]); - b - }, - dep_req("c", "=1.0.0"), - ]), - ]; - let reg = registry(input); - let _ = resolve_and_validated(vec![dep("foo")], ®, None); -} - #[test] fn cyclic_good_error_message() { let input = vec![ diff --git a/crates/resolver-tests/tests/validated.rs b/crates/resolver-tests/tests/validated.rs new file mode 100644 index 00000000000..5eaccf29252 --- /dev/null +++ b/crates/resolver-tests/tests/validated.rs @@ -0,0 +1,528 @@ +use cargo::core::{dependency::DepKind, Dependency}; + +use resolver_tests::{ + helpers::{ + dep, dep_kind, dep_platform, dep_req, dep_req_kind, dep_req_platform, pkg, pkg_dep, + pkg_dep_with, registry, ToDep, + }, + pkg, resolve, resolve_and_validated, + sat::SatResolver, +}; + +#[test] +fn off_by_one_bug() { + let input = vec![ + pkg!(("A-sys", "0.0.1")), + pkg!(("A-sys", "0.0.4")), + pkg!(("A-sys", "0.0.6")), + pkg!(("A-sys", "0.0.7")), + pkg!(("NA", "0.0.0") => [dep_req("A-sys", "<= 0.0.5"),]), + pkg!(("NA", "0.0.1") => [dep_req("A-sys", ">= 0.0.6, <= 0.0.8"),]), + pkg!(("a", "0.0.1")), + pkg!(("a", "0.0.2")), + pkg!(("aa", "0.0.0") => [dep_req("A-sys", ">= 0.0.4, <= 0.0.6"),dep_req("NA", "<= 0.0.0"),]), + pkg!(("f", "0.0.3") => [dep("NA"),dep_req("a", "<= 0.0.2"),dep("aa"),]), + ]; + + let reg = registry(input); + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(vec![dep("f")], ®, &mut 
sat_resolver).is_ok());
+}
+
+#[test]
+fn conflict_store_bug() {
+    let input = vec![
+        pkg!(("A", "0.0.3")),
+        pkg!(("A", "0.0.5")),
+        pkg!(("A", "0.0.9") => [dep("bad"),]),
+        pkg!(("A", "0.0.10") => [dep("bad"),]),
+        pkg!(("L-sys", "0.0.1") => [dep("bad"),]),
+        pkg!(("L-sys", "0.0.5")),
+        pkg!(("R", "0.0.4") => [
+            dep_req("L-sys", "= 0.0.5"),
+        ]),
+        pkg!(("R", "0.0.6")),
+        pkg!(("a-sys", "0.0.5")),
+        pkg!(("a-sys", "0.0.11")),
+        pkg!(("c", "0.0.12") => [
+            dep_req("R", ">= 0.0.3, <= 0.0.4"),
+        ]),
+        pkg!(("c", "0.0.13") => [
+            dep_req("a-sys", ">= 0.0.8, <= 0.0.11"),
+        ]),
+        pkg!(("c0", "0.0.6") => [
+            dep_req("L-sys", "<= 0.0.2"),
+        ]),
+        pkg!(("c0", "0.0.10") => [
+            dep_req("A", ">= 0.0.9, <= 0.0.10"),
+            dep_req("a-sys", "= 0.0.5"),
+        ]),
+        pkg!("j" => [
+            dep_req("A", ">= 0.0.3, <= 0.0.5"),
+            dep_req("R", ">=0.0.4, <= 0.0.6"),
+            dep_req("c", ">= 0.0.9"),
+            dep_req("c0", ">= 0.0.6"),
+        ]),
+    ];
+
+    let reg = registry(input);
+    let mut sat_resolver = SatResolver::new(&reg);
+    assert!(resolve_and_validated(vec![dep("j")], &reg, &mut sat_resolver).is_err());
+}
+
+#[test]
+fn conflict_store_more_then_one_match() {
+    let input = vec![
+        pkg!(("A", "0.0.0")),
+        pkg!(("A", "0.0.1")),
+        pkg!(("A-sys", "0.0.0")),
+        pkg!(("A-sys", "0.0.1")),
+        pkg!(("A-sys", "0.0.2")),
+        pkg!(("A-sys", "0.0.3")),
+        pkg!(("A-sys", "0.0.12")),
+        pkg!(("A-sys", "0.0.16")),
+        pkg!(("B-sys", "0.0.0")),
+        pkg!(("B-sys", "0.0.1")),
+        pkg!(("B-sys", "0.0.2") => [dep_req("A-sys", "= 0.0.12"),]),
+        pkg!(("BA-sys", "0.0.0") => [dep_req("A-sys","= 0.0.16"),]),
+        pkg!(("BA-sys", "0.0.1") => [dep("bad"),]),
+        pkg!(("BA-sys", "0.0.2") => [dep("bad"),]),
+        pkg!("nA" => [
+            dep("A"),
+            dep_req("A-sys", "<= 0.0.3"),
+            dep("B-sys"),
+            dep("BA-sys"),
+        ]),
+    ];
+    let reg = registry(input);
+    let mut sat_resolver = SatResolver::new(&reg);
+    assert!(resolve_and_validated(vec![dep("nA")], &reg, &mut sat_resolver).is_err());
+}
+
+#[test]
+fn bad_lockfile_from_8249() {
+    let input = vec![
+        pkg!(("a-sys", "0.2.0")),
+
pkg!(("a-sys", "0.1.0")), + pkg!(("b", "0.1.0") => [ + dep_req("a-sys", "0.1"), // should be optional: true, but not needed for now + ]), + pkg!(("c", "1.0.0") => [ + dep_req("b", "=0.1.0"), + ]), + pkg!("foo" => [ + dep_req("a-sys", "=0.2.0"), + { + let mut b = dep_req("b", "=0.1.0"); + b.set_features(vec!["a-sys"]); + b + }, + dep_req("c", "=1.0.0"), + ]), + ]; + let reg = registry(input); + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(vec![dep("foo")], ®, &mut sat_resolver).is_err()); +} + +#[test] +fn registry_with_features() { + let reg = registry(vec![ + pkg!("a"), + pkg!("b"), + pkg_dep_with( + "image", + vec!["a".opt(), "b".opt(), "jpg".to_dep()], + &[("default", &["a"]), ("b", &["dep:b"])], + ), + pkg!("jpg"), + pkg!("log"), + pkg!("man"), + pkg_dep_with("rgb", vec!["man".opt()], &[("man", &["dep:man"])]), + pkg_dep_with( + "dep", + vec!["image".with(&["b"]), "log".opt(), "rgb".opt()], + &[ + ("default", &["log", "image/default"]), + ("man", &["rgb?/man"]), + ], + ), + ]); + + for deps in [ + vec!["dep".with(&["default", "man", "log", "rgb"])], + vec!["dep".with(&["default"])], + vec!["dep".with(&[])], + vec!["dep".with(&["man"])], + vec!["dep".with(&["man", "rgb"])], + ] { + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_ok()); + } +} + +#[test] +fn missing_feature() { + let reg = registry(vec![pkg!("a")]); + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(vec!["a".with(&["f"])], ®, &mut sat_resolver).is_err()); +} + +#[test] +fn missing_dep_feature() { + let reg = registry(vec![ + pkg("a"), + pkg_dep_with("dep", vec![dep("a")], &[("f", &["a/a"])]), + ]); + + let deps = vec![dep("dep").with(&["f"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_err()); +} + +#[test] +fn missing_weak_dep_feature() { + let reg = registry(vec![ + pkg("a"), + pkg_dep_with("dep1", vec![dep("a")], 
&[("f", &["a/a"])]), + pkg_dep_with("dep2", vec!["a".opt()], &[("f", &["a/a"])]), + pkg_dep_with("dep3", vec!["a".opt()], &[("f", &["a?/a"])]), + pkg_dep_with("dep4", vec!["x".opt()], &[("f", &["x?/a"])]), + ]); + + let deps = vec![dep("dep1").with(&["f"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_err()); + + let deps = vec![dep("dep2").with(&["f"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_err()); + + let deps = vec![dep("dep2").with(&["a", "f"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_err()); + + // Weak dependencies are not supported yet in the dependency resolver + let deps = vec![dep("dep3").with(&["f"])]; + assert!(resolve(deps.clone(), ®).is_err()); + assert!(SatResolver::new(®).sat_resolve(&deps)); + + let deps = vec![dep("dep3").with(&["a", "f"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_err()); + + // Weak dependencies are not supported yet in the dependency resolver + let deps = vec![dep("dep4").with(&["f"])]; + assert!(resolve(deps.clone(), ®).is_err()); + assert!(SatResolver::new(®).sat_resolve(&deps)); + + let deps = vec![dep("dep4").with(&["x", "f"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_err()); +} + +#[test] +fn conflict_feature_and_sys() { + let reg = registry(vec![ + pkg(("a-sys", "1.0.0")), + pkg(("a-sys", "2.0.0")), + pkg_dep_with( + ("a", "1.0.0"), + vec![dep_req("a-sys", "1.0.0")], + &[("f", &[])], + ), + pkg_dep_with( + ("a", "2.0.0"), + vec![dep_req("a-sys", "2.0.0")], + &[("g", &[])], + ), + pkg_dep("dep", vec![dep_req("a", "2.0.0")]), + ]); + + let deps = vec![dep_req("a", "*").with(&["f"]), dep("dep")]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut 
sat_resolver).is_err()); +} + +#[test] +fn conflict_weak_features() { + let reg = registry(vec![ + pkg(("a-sys", "1.0.0")), + pkg(("a-sys", "2.0.0")), + pkg_dep("a1", vec![dep_req("a-sys", "1.0.0").opt()]), + pkg_dep("a2", vec![dep_req("a-sys", "2.0.0").opt()]), + pkg_dep_with( + "dep", + vec!["a1".opt(), "a2".opt()], + &[("a1", &["a1?/a-sys"]), ("a2", &["a2?/a-sys"])], + ), + ]); + + let deps = vec![dep("dep").with(&["a1", "a2"])]; + + // Weak dependencies are not supported yet in the dependency resolver + assert!(resolve(deps.clone(), ®).is_err()); + assert!(SatResolver::new(®).sat_resolve(&deps)); +} + +#[test] +fn multiple_dep_kinds_and_targets() { + let reg = registry(vec![ + pkg(("a-sys", "1.0.0")), + pkg(("a-sys", "2.0.0")), + pkg_dep_with( + "dep1", + vec![ + dep_req_platform("a-sys", "1.0.0", "cfg(all())").opt(), + dep_req("a-sys", "1.0.0").opt(), + dep_req_kind("a-sys", "2.0.0", DepKind::Build).opt(), + ], + &[("default", &["dep:a-sys"])], + ), + pkg_dep_with( + "dep2", + vec![ + dep_req_platform("a-sys", "1.0.0", "cfg(all())").opt(), + dep_req("a-sys", "1.0.0").opt(), + dep_req_kind("a-sys", "2.0.0", DepKind::Development).rename("a-sys-dev"), + ], + &[("default", &["dep:a-sys", "a-sys-dev/bad"])], + ), + ]); + + let deps = vec![dep("dep1")]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_err()); + + let deps = vec![dep("dep2")]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_ok()); + + let deps = vec![ + dep_req("a-sys", "1.0.0"), + dep_req_kind("a-sys", "2.0.0", DepKind::Build).rename("a2"), + ]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_err()); + + let deps = vec![ + dep_req("a-sys", "1.0.0"), + dep_req_kind("a-sys", "2.0.0", DepKind::Development).rename("a2"), + ]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut 
sat_resolver).is_err()); +} + +#[test] +fn multiple_dep_kinds_and_targets_with_different_packages() { + let reg = registry(vec![ + pkg_dep_with("a", vec![], &[("f", &[])]), + pkg_dep_with("b", vec![], &[("f", &[])]), + pkg_dep_with("c", vec![], &[("g", &[])]), + pkg_dep_with( + "dep1", + vec![ + "a".opt().rename("x").with(&["f"]), + dep_platform("a", "cfg(all())").opt().rename("x"), + dep_kind("b", DepKind::Build).opt().rename("x").with(&["f"]), + ], + &[("default", &["x"])], + ), + pkg_dep_with( + "dep2", + vec![ + "a".opt().rename("x").with(&["f"]), + dep_platform("a", "cfg(all())").opt().rename("x"), + dep_kind("c", DepKind::Build).opt().rename("x").with(&["f"]), + ], + &[("default", &["x"])], + ), + ]); + + let deps = vec!["dep1".with(&["default"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_ok()); + + let deps = vec!["dep2".with(&["default"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_err()); +} + +#[test] +fn dep_feature_with_shadowing_feature() { + let reg = registry(vec![ + pkg_dep_with("a", vec![], &[("b", &[])]), + pkg_dep_with( + "dep", + vec!["a".opt().rename("aa"), "c".opt()], + &[("default", &["aa/b"]), ("aa", &["c"])], + ), + ]); + + let deps = vec!["dep".with(&["default"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_err()); +} + +#[test] +fn dep_feature_not_optional_with_shadowing_feature() { + let reg = registry(vec![ + pkg_dep_with("a", vec![], &[("b", &[])]), + pkg_dep_with( + "dep", + vec!["a".rename("aa"), "c".opt()], + &[("default", &["aa/b"]), ("aa", &["c"])], + ), + ]); + + let deps = vec!["dep".with(&["default"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_ok()); +} + +#[test] +fn dep_feature_weak_with_shadowing_feature() { + let reg = registry(vec![ + pkg_dep_with("a", 
vec![], &[("b", &[])]), + pkg_dep_with( + "dep", + vec!["a".opt().rename("aa"), "c".opt()], + &[("default", &["aa?/b"]), ("aa", &["c"])], + ), + ]); + + let deps = vec!["dep".with(&["default"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_ok()); +} + +#[test] +fn dep_feature_duplicate_with_shadowing_feature() { + let reg = registry(vec![ + pkg_dep_with("a", vec![], &[("b", &[])]), + pkg_dep_with( + "dep", + vec![ + "a".opt().rename("aa"), + dep_kind("a", DepKind::Build).rename("aa"), + "c".opt(), + ], + &[("default", &["aa/b"]), ("aa", &["c"])], + ), + ]); + + let deps = vec!["dep".with(&["default"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_err()); +} + +#[test] +fn optional_dep_features() { + let reg = registry(vec![ + pkg_dep("a", vec!["bad".opt()]), + pkg_dep("b", vec!["a".opt().with(&["bad"])]), + pkg_dep("dep", vec![dep("a"), dep("b")]), + ]); + + let deps = vec![dep("dep")]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_ok()); +} + +#[test] +fn optional_dep_features_with_rename() { + let reg = registry(vec![ + pkg("x1"), + pkg_dep("a", vec!["x1".opt(), "x2".opt(), "x3".opt()]), + pkg_dep( + "dep1", + vec![ + "a".opt().with(&["x1"]), + dep_kind("a", DepKind::Build).opt().with(&["x2"]), + ], + ), + pkg_dep( + "dep2", + vec![ + "a".opt().with(&["x1"]), + "a".opt().rename("a2").with(&["x3"]), + ], + ), + ]); + + let deps = vec!["dep1".with(&["a"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_err()); + + let deps = vec!["dep2".with(&["a"])]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_ok()); +} + +#[test] +fn optional_weak_dep_features() { + let reg = registry(vec![ + pkg_dep("a", vec!["bad".opt()]), + pkg_dep("b", vec![dep("a")]), + 
pkg_dep_with("dep", vec!["a".opt(), dep("b")], &[("f", &["a?/bad"])]), + ]); + + let deps = vec!["dep".with(&["f"])]; + + // Weak dependencies are not supported yet in the dependency resolver + assert!(resolve(deps.clone(), ®).is_err()); + assert!(SatResolver::new(®).sat_resolve(&deps)); +} + +#[test] +fn default_feature_multiple_major_versions() { + let reg = registry(vec![ + pkg_dep_with(("a", "0.2.0"), vec![], &[("default", &[])]), + pkg(("a", "0.3.0")), + pkg_dep_with(("a", "0.4.0"), vec![], &[("default", &[])]), + pkg_dep( + "dep1", + vec![ + dep_req("a", ">=0.2, <0.4").with_default(), + dep_req("a", "0.2").rename("a2").with(&[]), + ], + ), + pkg_dep( + "dep2", + vec![ + dep_req("a", ">=0.2, <0.4").with_default(), + dep_req("a", "0.3").rename("a2").with(&[]), + ], + ), + pkg_dep( + "dep3", + vec![ + dep_req("a", ">=0.2, <0.4").with_default(), + dep_req("a", "0.2").rename("a1").with(&[]), + dep_req("a", "0.3").rename("a2").with(&[]), + ], + ), + pkg_dep("dep4", vec![dep_req("a", ">=0.2, <0.4").with_default()]), + pkg_dep("dep5", vec![dep_req("a", ">=0.3, <0.5").with_default()]), + ]); + + let deps = vec![dep("dep1")]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_ok()); + + let deps = vec![dep("dep2")]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_ok()); + + let deps = vec![dep("dep3")]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_ok()); + + let deps = vec![dep("dep4")]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_ok()); + + let deps = vec![dep("dep5")]; + let mut sat_resolver = SatResolver::new(®); + assert!(resolve_and_validated(deps, ®, &mut sat_resolver).is_ok()); +} diff --git a/crates/rustfix/Cargo.toml b/crates/rustfix/Cargo.toml index 533d028d95d..7b3d95a401a 100644 --- a/crates/rustfix/Cargo.toml +++ 
b/crates/rustfix/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "rustfix" -version = "0.8.3" +version = "0.8.7" authors = [ "Pascal Hertleif ", "Oliver Schneider ", @@ -26,7 +26,7 @@ tracing.workspace = true [dev-dependencies] anyhow.workspace = true proptest.workspace = true -similar = "2.5.0" +similar.workspace = true tempfile.workspace = true tracing-subscriber.workspace = true diff --git a/crates/rustfix/README.md b/crates/rustfix/README.md index 0546e6018f9..70ec423c757 100644 --- a/crates/rustfix/README.md +++ b/crates/rustfix/README.md @@ -9,6 +9,10 @@ This is a low-level library. You pass it the JSON output from `rustc`, and you c If you are looking for the [`cargo fix`] implementation, the core of it is located in [`cargo::ops::fix`]. +> This crate is maintained by the Cargo team, primarily for use by Cargo and Rust compiler test suite +> and not intended for external use (except as a transitive dependency). This +> crate may make major changes to its APIs or be deprecated without warning. + [`cargo fix`]: https://doc.rust-lang.org/cargo/commands/cargo-fix.html [`cargo::ops::fix`]: https://github.com/rust-lang/cargo/blob/master/src/cargo/ops/fix.rs diff --git a/crates/rustfix/src/lib.rs b/crates/rustfix/src/lib.rs index 2f6acaf3028..a1e2c47cfeb 100644 --- a/crates/rustfix/src/lib.rs +++ b/crates/rustfix/src/lib.rs @@ -18,6 +18,10 @@ //! 3. Create a [`CodeFix`] with the source of a file to modify. //! 4. Call [`CodeFix::apply`] to apply a change. //! 5. Call [`CodeFix::finish`] to get the result and write it back to disk. +//! +//! > This crate is maintained by the Cargo team, primarily for use by Cargo and Rust compiler test suite +//! > and not intended for external use (except as a transitive dependency). This +//! > crate may make major changes to its APIs or be deprecated without warning. use std::collections::HashSet; use std::ops::Range; @@ -213,6 +217,7 @@ pub fn collect_suggestions( /// 1. Feeds the source of a file to [`CodeFix::new`]. /// 2. 
Calls [`CodeFix::apply`] to apply suggestions to the source code. /// 3. Calls [`CodeFix::finish`] to get the "fixed" code. +#[derive(Clone)] pub struct CodeFix { data: replace::Data, /// Whether or not the data has been modified. @@ -230,12 +235,18 @@ impl CodeFix { /// Applies a suggestion to the code. pub fn apply(&mut self, suggestion: &Suggestion) -> Result<(), Error> { - for sol in &suggestion.solutions { - for r in &sol.replacements { - self.data - .replace_range(r.snippet.range.clone(), r.replacement.as_bytes())?; - self.modified = true; - } + for solution in &suggestion.solutions { + self.apply_solution(solution)?; + } + Ok(()) + } + + /// Applies an individual solution from a [`Suggestion`]. + pub fn apply_solution(&mut self, solution: &Solution) -> Result<(), Error> { + for r in &solution.replacements { + self.data + .replace_range(r.snippet.range.clone(), r.replacement.as_bytes())?; + self.modified = true; } Ok(()) } diff --git a/crates/rustfix/tests/everything/multiple-solutions.fixed.rs b/crates/rustfix/tests/everything/multiple-solutions.nightly.fixed.rs similarity index 54% rename from crates/rustfix/tests/everything/multiple-solutions.fixed.rs rename to crates/rustfix/tests/everything/multiple-solutions.nightly.fixed.rs index 1a261785d77..50ccab26b81 100644 --- a/crates/rustfix/tests/everything/multiple-solutions.fixed.rs +++ b/crates/rustfix/tests/everything/multiple-solutions.nightly.fixed.rs @@ -1,4 +1,4 @@ -use std::collections::{HashSet}; +use std::collections::HashSet; fn main() { let _: HashSet<()>; diff --git a/crates/rustfix/tests/everything/multiple-solutions.json b/crates/rustfix/tests/everything/multiple-solutions.nightly.json similarity index 55% rename from crates/rustfix/tests/everything/multiple-solutions.json rename to crates/rustfix/tests/everything/multiple-solutions.nightly.json index 89b14ccc848..ce9d54e4a84 100644 --- a/crates/rustfix/tests/everything/multiple-solutions.json +++ 
b/crates/rustfix/tests/everything/multiple-solutions.nightly.json @@ -1,4 +1,5 @@ { + "$message_type": "diagnostic", "message": "unused imports: `HashMap`, `VecDeque`", "code": { "code": "unused_imports", @@ -7,7 +8,7 @@ "level": "warning", "spans": [ { - "file_name": "src/main.rs", + "file_name": "./tests/everything/multiple-solutions.nightly.rs", "byte_start": 23, "byte_end": 30, "line_start": 1, @@ -28,7 +29,7 @@ "expansion": null }, { - "file_name": "src/main.rs", + "file_name": "./tests/everything/multiple-solutions.nightly.rs", "byte_start": 41, "byte_end": 49, "line_start": 1, @@ -51,7 +52,7 @@ ], "children": [ { - "message": "#[warn(unused_imports)] on by default", + "message": "`#[warn(unused_imports)]` on by default", "code": null, "level": "note", "spans": [], @@ -64,7 +65,28 @@ "level": "help", "spans": [ { - "file_name": "src/main.rs", + "file_name": "./tests/everything/multiple-solutions.nightly.rs", + "byte_start": 22, + "byte_end": 23, + "line_start": 1, + "line_end": 1, + "column_start": 23, + "column_end": 24, + "is_primary": true, + "text": [ + { + "text": "use std::collections::{HashMap, HashSet, VecDeque};", + "highlight_start": 23, + "highlight_end": 24 + } + ], + "label": null, + "suggested_replacement": "", + "suggestion_applicability": "MachineApplicable", + "expansion": null + }, + { + "file_name": "./tests/everything/multiple-solutions.nightly.rs", "byte_start": 23, "byte_end": 32, "line_start": 1, @@ -85,7 +107,7 @@ "expansion": null }, { - "file_name": "src/main.rs", + "file_name": "./tests/everything/multiple-solutions.nightly.rs", "byte_start": 39, "byte_end": 49, "line_start": 1, @@ -104,11 +126,41 @@ "suggested_replacement": "", "suggestion_applicability": "MachineApplicable", "expansion": null + }, + { + "file_name": "./tests/everything/multiple-solutions.nightly.rs", + "byte_start": 49, + "byte_end": 50, + "line_start": 1, + "line_end": 1, + "column_start": 50, + "column_end": 51, + "is_primary": true, + "text": [ + { + "text": 
"use std::collections::{HashMap, HashSet, VecDeque};", + "highlight_start": 50, + "highlight_end": 51 + } + ], + "label": null, + "suggested_replacement": "", + "suggestion_applicability": "MachineApplicable", + "expansion": null } ], "children": [], "rendered": null } ], - "rendered": "warning: unused imports: `HashMap`, `VecDeque`\n --> src/main.rs:1:24\n |\n1 | use std::collections::{HashMap, HashSet, VecDeque};\n | ^^^^^^^ ^^^^^^^^\n |\n = note: #[warn(unused_imports)] on by default\nhelp: remove the unused imports\n |\n1 | use std::collections::{HashSet};\n | -- --\n\n" + "rendered": "warning: unused imports: `HashMap`, `VecDeque`\n --> ./tests/everything/multiple-solutions.nightly.rs:1:24\n |\n1 | use std::collections::{HashMap, HashSet, VecDeque};\n | ^^^^^^^ ^^^^^^^^\n |\n = note: `#[warn(unused_imports)]` on by default\n\n" +} +{ + "$message_type": "diagnostic", + "message": "1 warning emitted", + "code": null, + "level": "warning", + "spans": [], + "children": [], + "rendered": "warning: 1 warning emitted\n\n" } diff --git a/crates/rustfix/tests/everything/multiple-solutions.rs b/crates/rustfix/tests/everything/multiple-solutions.nightly.rs similarity index 100% rename from crates/rustfix/tests/everything/multiple-solutions.rs rename to crates/rustfix/tests/everything/multiple-solutions.nightly.rs diff --git a/crates/rustfix/tests/parse_and_replace.rs b/crates/rustfix/tests/parse_and_replace.rs index 949903b238f..0c7e64a3b2a 100644 --- a/crates/rustfix/tests/parse_and_replace.rs +++ b/crates/rustfix/tests/parse_and_replace.rs @@ -45,6 +45,26 @@ mod settings { pub const BLESS: &str = "RUSTFIX_TEST_BLESS"; } +static mut VERSION: (u32, bool) = (0, false); + +// Temporarily copy from `cargo_test_macro::version`. 
+fn version() -> (u32, bool) { + static INIT: std::sync::Once = std::sync::Once::new(); + INIT.call_once(|| { + let output = Command::new("rustc") + .arg("-V") + .output() + .expect("cargo should run"); + let stdout = std::str::from_utf8(&output.stdout).expect("utf8"); + let vers = stdout.split_whitespace().skip(1).next().unwrap(); + let is_nightly = option_env!("CARGO_TEST_DISABLE_NIGHTLY").is_none() + && (vers.contains("-nightly") || vers.contains("-dev")); + let minor = vers.split('.').skip(1).next().unwrap().parse().unwrap(); + unsafe { VERSION = (minor, is_nightly) } + }); + unsafe { VERSION } +} + fn compile(file: &Path) -> Result { let tmp = tempdir()?; @@ -144,8 +164,8 @@ fn test_rustfix_with_file>(file: P, mode: &str) -> Result<(), Err debug!("next up: {:?}", file); let code = fs::read_to_string(file)?; - let errors = - compile_and_get_json_errors(file).context(format!("could compile {}", file.display()))?; + let errors = compile_and_get_json_errors(file) + .with_context(|| format!("could not compile {}", file.display()))?; let suggestions = rustfix::get_suggestions_from_json(&errors, &HashSet::new(), filter_suggestions) .context("could not load suggestions")?; @@ -155,10 +175,8 @@ fn test_rustfix_with_file>(file: P, mode: &str) -> Result<(), Err } if std::env::var(settings::CHECK_JSON).is_ok() { - let expected_json = fs::read_to_string(&json_file).context(format!( - "could not load json fixtures for {}", - file.display() - ))?; + let expected_json = fs::read_to_string(&json_file) + .with_context(|| format!("could not load json fixtures for {}", file.display()))?; let expected_suggestions = rustfix::get_suggestions_from_json(&expected_json, &HashSet::new(), filter_suggestions) .context("could not load expected suggestions")?; @@ -174,7 +192,7 @@ fn test_rustfix_with_file>(file: P, mode: &str) -> Result<(), Err } let fixed = apply_suggestions(&code, &suggestions) - .context(format!("could not apply suggestions to {}", file.display()))? 
+ .with_context(|| format!("could not apply suggestions to {}", file.display()))? .replace('\r', ""); if std::env::var(settings::RECORD_FIXED_RUST).is_ok() { @@ -189,7 +207,7 @@ fn test_rustfix_with_file>(file: P, mode: &str) -> Result<(), Err } let expected_fixed = fs::read_to_string(&fixed_file) - .context(format!("could read fixed file for {}", file.display()))? + .with_context(|| format!("could read fixed file for {}", file.display()))? .replace('\r', ""); ensure!( fixed.trim() == expected_fixed.trim(), @@ -216,11 +234,24 @@ fn get_fixture_files(p: &str) -> Result, Error> { fn assert_fixtures(dir: &str, mode: &str) { let files = get_fixture_files(dir) - .context(format!("couldn't load dir `{}`", dir)) + .with_context(|| format!("couldn't load dir `{dir}`")) .unwrap(); let mut failures = 0; + let is_not_nightly = !version().1; + for file in &files { + if file + .file_stem() + .unwrap() + .to_str() + .unwrap() + .ends_with(".nightly") + && is_not_nightly + { + info!("skipped: {file:?}"); + continue; + } if let Err(err) = test_rustfix_with_file(file, mode) { println!("failed: {}", file.display()); warn!("{:?}", err); diff --git a/crates/xtask-build-man/src/main.rs b/crates/xtask-build-man/src/main.rs index 2ab3f098aaa..646610d5fa5 100644 --- a/crates/xtask-build-man/src/main.rs +++ b/crates/xtask-build-man/src/main.rs @@ -45,14 +45,14 @@ fn build_mdman() -> io::Result<()> { /// saved in the src/doc/src/commands/ directory. These are included in the /// Cargo book, which is converted to HTML by mdbook. fn build_cargo() -> io::Result<()> { - // Find all `src/doc/man/cargo-*.md` + // Find all `src/doc/man/cargo*.md` let src_paths = { let mut src_paths = Vec::new(); for entry in fs::read_dir("src/doc/man")? 
{ let entry = entry?; let file_name = entry.file_name(); let file_name = file_name.to_str().unwrap(); - if file_name.starts_with("cargo-") && file_name.ends_with(".md") { + if file_name.starts_with("cargo") && file_name.ends_with(".md") { src_paths.push(entry.path()); } } diff --git a/crates/xtask-bump-check/Cargo.toml b/crates/xtask-bump-check/Cargo.toml index 44ce34141bf..8fc13c77bd1 100644 --- a/crates/xtask-bump-check/Cargo.toml +++ b/crates/xtask-bump-check/Cargo.toml @@ -16,3 +16,6 @@ tracing.workspace = true [lints] workspace = true + +[lints.rust] +unexpected_cfgs = { level = "warn", check-cfg = ['cfg(target_os, values("solana"))'] } diff --git a/crates/xtask-bump-check/src/xtask.rs b/crates/xtask-bump-check/src/xtask.rs index a53a0a8bcfb..db521bdd7a2 100644 --- a/crates/xtask-bump-check/src/xtask.rs +++ b/crates/xtask-bump-check/src/xtask.rs @@ -16,7 +16,6 @@ use std::fs; use std::task; use cargo::core::dependency::Dependency; -use cargo::core::registry::PackageRegistry; use cargo::core::Package; use cargo::core::Registry; use cargo::core::SourceId; @@ -118,17 +117,26 @@ fn bump_check(args: &clap::ArgMatches, gctx: &cargo::util::GlobalContext) -> Car let changed_members = changed(&ws, &repo, &base_commit, &head_commit)?; let status = |msg: &str| gctx.shell().status(STATUS, msg); - // Don't check against beta and stable branches, - // as the publish of these crates are not tied with Rust release process. - // See `TO_PUBLISH` in publish.py. - let crates_not_check_against_channels = ["home"]; + let crates_not_check_against_channels = [ + // High false positive rate between beta branch and requisite version bump soon after + // + // Low risk because we always bump the "major" version after beta branch; we are + // only losing out on checks for patch releases. + // + // Note: this is already skipped in `changed` + "cargo", + // Don't check against beta and stable branches, + // as the publish of these crates are not tied with Rust release process. 
+ // See `TO_PUBLISH` in publish.py. + "home", + ]; status(&format!("base commit `{}`", base_commit.id()))?; status(&format!("head commit `{}`", head_commit.id()))?; let mut needs_bump = Vec::new(); - check_crates_io(gctx, &changed_members, &mut needs_bump)?; + check_crates_io(&ws, &changed_members, &mut needs_bump)?; if let Some(referenced_commit) = referenced_commit.as_ref() { status(&format!("compare against `{}`", referenced_commit.id()))?; @@ -168,18 +176,17 @@ fn bump_check(args: &clap::ArgMatches, gctx: &cargo::util::GlobalContext) -> Car let mut cmd = ProcessBuilder::new("cargo"); cmd.arg("semver-checks") .arg("check-release") - .args(&["--exclude", "cargo-test-macro"]) // FIXME: Remove once 1.79 is stable. - .args(&["--exclude", "cargo-test-support"]) // FIXME: Remove once 1.79 is stable. .arg("--workspace"); gctx.shell().status("Running", &cmd)?; cmd.exec()?; + // This test does not work for Solana because we are running + // it against a too old version of cargo. + #[cfg(target_os = "solana")] if let Some(referenced_commit) = referenced_commit.as_ref() { let mut cmd = ProcessBuilder::new("cargo"); cmd.arg("semver-checks") .arg("--workspace") - .args(&["--exclude", "cargo-test-macro"]) // FIXME: Remove once 1.79 is stable. - .args(&["--exclude", "cargo-test-support"]) // FIXME: Remove once 1.79 is stable. .arg("--baseline-rev") .arg(referenced_commit.id().to_string()); for krate in crates_not_check_against_channels { @@ -293,8 +300,8 @@ fn beta_and_stable_branch(repo: &git2::Repository) -> CargoResult<[git2::Branch< for branch in repo.branches(Some(git2::BranchType::Remote))? 
{ let (branch, _) = branch?; let name = branch.name()?.unwrap(); - let Some((_, version)) = name.split_once("/rust-") else { - tracing::trace!("branch `{name}` is not in the format of `/rust-`"); + let Some((_, version)) = name.split_once("/solana-") else { + tracing::trace!("branch `{name}` is not in the format of `/solana-`"); continue; }; let Ok(version) = version.parse::() else { @@ -376,12 +383,13 @@ fn symmetric_diff<'a>( /// /// Assumption: We always release a version larger than all existing versions. fn check_crates_io<'a>( - gctx: &GlobalContext, + ws: &Workspace<'a>, changed_members: &HashMap<&'a str, &'a Package>, needs_bump: &mut Vec<&'a Package>, ) -> CargoResult<()> { + let gctx = ws.gctx(); let source_id = SourceId::crates_io(gctx)?; - let mut registry = PackageRegistry::new(gctx)?; + let mut registry = ws.package_registry()?; let _lock = gctx.acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?; registry.lock_patches(); gctx.shell().status( diff --git a/crates/xtask-lint-docs/Cargo.toml b/crates/xtask-lint-docs/Cargo.toml new file mode 100644 index 00000000000..16f15d2efe7 --- /dev/null +++ b/crates/xtask-lint-docs/Cargo.toml @@ -0,0 +1,14 @@ +[package] +name = "xtask-lint-docs" +version = "0.1.0" +edition.workspace = true +publish = false + +[dependencies] +anyhow.workspace = true +cargo.workspace = true +clap.workspace = true +itertools.workspace = true + +[lints] +workspace = true diff --git a/crates/xtask-lint-docs/src/main.rs b/crates/xtask-lint-docs/src/main.rs new file mode 100644 index 00000000000..79862d6c400 --- /dev/null +++ b/crates/xtask-lint-docs/src/main.rs @@ -0,0 +1,108 @@ +use cargo::util::command_prelude::{flag, ArgMatchesExt}; +use cargo::util::lints::{Lint, LintLevel}; +use itertools::Itertools; +use std::fmt::Write; +use std::path::PathBuf; + +fn cli() -> clap::Command { + clap::Command::new("xtask-lint-docs").arg(flag("check", "Check that the docs are up-to-date")) +} + +fn main() -> anyhow::Result<()> { + let args = 
cli().get_matches(); + let check = args.flag("check"); + + let mut allow = Vec::new(); + let mut warn = Vec::new(); + let mut deny = Vec::new(); + let mut forbid = Vec::new(); + + let mut lint_docs = String::new(); + for lint in cargo::util::lints::LINTS + .iter() + .sorted_by_key(|lint| lint.name) + { + if lint.docs.is_some() { + let section = match lint.default_level { + LintLevel::Allow => &mut allow, + LintLevel::Warn => &mut warn, + LintLevel::Deny => &mut deny, + LintLevel::Forbid => &mut forbid, + }; + section.push(lint.name); + add_lint(lint, &mut lint_docs)?; + } + } + + let mut buf = String::new(); + writeln!(buf, "# Lints\n")?; + writeln!( + buf, + "Note: [Cargo's linting system is unstable](unstable.md#lintscargo) and can only be used on nightly toolchains" + )?; + writeln!(buf)?; + + if !allow.is_empty() { + add_level_section(LintLevel::Allow, &allow, &mut buf)?; + } + if !warn.is_empty() { + add_level_section(LintLevel::Warn, &warn, &mut buf)?; + } + if !deny.is_empty() { + add_level_section(LintLevel::Deny, &deny, &mut buf)?; + } + if !forbid.is_empty() { + add_level_section(LintLevel::Forbid, &forbid, &mut buf)?; + } + + buf.push_str(&lint_docs); + + if check { + let old = std::fs::read_to_string(lint_docs_path())?; + if old != buf { + anyhow::bail!( + "The lints documentation is out-of-date. Run `cargo lint-docs` to update it."
+ ); + } + } else { + std::fs::write(lint_docs_path(), buf)?; + } + Ok(()) +} + +fn add_lint(lint: &Lint, buf: &mut String) -> std::fmt::Result { + writeln!(buf, "## `{}`", lint.name)?; + writeln!(buf, "Set to `{}` by default", lint.default_level)?; + writeln!(buf, "{}\n", lint.docs.as_ref().unwrap()) +} + +fn add_level_section(level: LintLevel, lint_names: &[&str], buf: &mut String) -> std::fmt::Result { + let title = match level { + LintLevel::Allow => "Allowed-by-default", + LintLevel::Warn => "Warn-by-default", + LintLevel::Deny => "Deny-by-default", + LintLevel::Forbid => "Forbid-by-default", + }; + writeln!(buf, "## {title}\n")?; + writeln!( + buf, + "These lints are all set to the '{}' level by default.", + level + )?; + + for name in lint_names { + writeln!(buf, "- [`{}`](#{})", name, name)?; + } + writeln!(buf)?; + Ok(()) +} + +fn lint_docs_path() -> PathBuf { + let pkg_root = env!("CARGO_MANIFEST_DIR"); + let ws_root = PathBuf::from(format!("{pkg_root}/../..")); + let path = { + let path = ws_root.join("src/doc/src/reference/lints.md"); + path.canonicalize().unwrap_or(path) + }; + path +} diff --git a/credential/cargo-credential-1password/Cargo.toml b/credential/cargo-credential-1password/Cargo.toml index 144b44070db..b7ddc691d4e 100644 --- a/credential/cargo-credential-1password/Cargo.toml +++ b/credential/cargo-credential-1password/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "cargo-credential-1password" -version = "0.4.4" +version = "0.4.5" rust-version.workspace = true edition.workspace = true license.workspace = true diff --git a/credential/cargo-credential-1password/README.md b/credential/cargo-credential-1password/README.md index fc3c9460a56..74bf4c4eeb0 100644 --- a/credential/cargo-credential-1password/README.md +++ b/credential/cargo-credential-1password/README.md @@ -2,6 +2,11 @@ A Cargo [credential provider] for [1password]. +> This crate is maintained by the Cargo team as a part of an experiment around +> 1password integration. 
We encourage people to try to use this crate in their projects and +> provide feedback through [issues](https://github.com/rust-lang/cargo/issues/), but do not +> guarantee long term maintenance. + ## Usage `cargo-credential-1password` uses the 1password `op` CLI to store the token. You diff --git a/credential/cargo-credential-1password/src/main.rs b/credential/cargo-credential-1password/src/main.rs index 38b567bf2d4..0cb720b36aa 100644 --- a/credential/cargo-credential-1password/src/main.rs +++ b/credential/cargo-credential-1password/src/main.rs @@ -1,4 +1,9 @@ //! Cargo registry 1password credential process. +//! +//! > This crate is maintained by the Cargo team as a part of an experiment around +//! > 1password integration. We encourage people to try to use this crate in their projects and +//! > provide feedback through [issues](https://github.com/rust-lang/cargo/issues/), but do not +//! > guarantee long term maintenance. #![allow(clippy::disallowed_methods)] #![allow(clippy::print_stderr)] diff --git a/credential/cargo-credential-libsecret/Cargo.toml b/credential/cargo-credential-libsecret/Cargo.toml index 522b9be383e..afd3af61293 100644 --- a/credential/cargo-credential-libsecret/Cargo.toml +++ b/credential/cargo-credential-libsecret/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "cargo-credential-libsecret" -version = "0.4.5" -rust-version = "1.77" # MSRV:1 +version = "0.4.9" +rust-version = "1.81" # MSRV:1 edition.workspace = true license.workspace = true homepage.workspace = true diff --git a/credential/cargo-credential-libsecret/README.md b/credential/cargo-credential-libsecret/README.md index aaba2887f41..a6af3b7575a 100644 --- a/credential/cargo-credential-libsecret/README.md +++ b/credential/cargo-credential-libsecret/README.md @@ -5,5 +5,9 @@ See the [credential-provider] documentation for how to use this. This credential provider is built-in to cargo as `cargo:libsecret`. 
+> This crate is maintained by the Cargo team, primarily for use by Cargo +> and not intended for external use (except as a transitive dependency). This +> crate may make major changes to its APIs or be deprecated without warning. + [GNOME libsecret]: https://wiki.gnome.org/Projects/Libsecret [credential-provider]: https://doc.rust-lang.org/nightly/cargo/reference/registry-authentication.html diff --git a/credential/cargo-credential-libsecret/src/lib.rs b/credential/cargo-credential-libsecret/src/lib.rs index ee179760539..0d10fb83689 100644 --- a/credential/cargo-credential-libsecret/src/lib.rs +++ b/credential/cargo-credential-libsecret/src/lib.rs @@ -1,3 +1,7 @@ +//! > This crate is maintained by the Cargo team, primarily for use by Cargo +//! > and not intended for external use (except as a transitive dependency). This +//! > crate may make major changes to its APIs or be deprecated without warning. + #[cfg(target_os = "linux")] mod linux { //! Implementation of the libsecret credential helper. 
@@ -115,7 +119,7 @@ mod linux { let secret_password_store_sync: Symbol<'_, SecretPasswordStoreSync>; let secret_password_clear_sync: Symbol<'_, SecretPasswordClearSync>; unsafe { - lib = Library::new("libsecret-1.so").context( + lib = Library::new("libsecret-1.so.0").context( "failed to load libsecret: try installing the `libsecret` \ or `libsecret-1-0` package with the system package manager", )?; diff --git a/credential/cargo-credential-macos-keychain/Cargo.toml b/credential/cargo-credential-macos-keychain/Cargo.toml index 2465120451b..f7cd1a47e54 100644 --- a/credential/cargo-credential-macos-keychain/Cargo.toml +++ b/credential/cargo-credential-macos-keychain/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "cargo-credential-macos-keychain" -version = "0.4.5" -rust-version = "1.77" # MSRV:1 +version = "0.4.9" +rust-version = "1.81" # MSRV:1 edition.workspace = true license.workspace = true homepage.workspace = true diff --git a/credential/cargo-credential-macos-keychain/README.md b/credential/cargo-credential-macos-keychain/README.md index f5efe496b8f..28cabf4206d 100644 --- a/credential/cargo-credential-macos-keychain/README.md +++ b/credential/cargo-credential-macos-keychain/README.md @@ -5,6 +5,10 @@ See the [credential-provider] documentation for how to use this. This credential provider is built-in to cargo as `cargo:macos-keychain`. +> This crate is maintained by the Cargo team, primarily for use by Cargo +> and not intended for external use (except as a transitive dependency). This +> crate may make major changes to its APIs or be deprecated without warning. 
+ [macOS Keychain]: https://support.apple.com/guide/keychain-access/welcome/mac [credential-provider]: https://doc.rust-lang.org/nightly/cargo/reference/registry-authentication.html diff --git a/credential/cargo-credential-macos-keychain/src/lib.rs b/credential/cargo-credential-macos-keychain/src/lib.rs index 8a702a3620c..f6d253c7a58 100644 --- a/credential/cargo-credential-macos-keychain/src/lib.rs +++ b/credential/cargo-credential-macos-keychain/src/lib.rs @@ -1,4 +1,8 @@ //! Cargo registry macos keychain credential process. +//! +//! > This crate is maintained by the Cargo team, primarily for use by Cargo +//! > and not intended for external use (except as a transitive dependency). This +//! > crate may make major changes to its APIs or be deprecated without warning. #![allow(clippy::print_stderr)] diff --git a/credential/cargo-credential-wincred/Cargo.toml b/credential/cargo-credential-wincred/Cargo.toml index 3d0017ad2c7..12246789e4c 100644 --- a/credential/cargo-credential-wincred/Cargo.toml +++ b/credential/cargo-credential-wincred/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "cargo-credential-wincred" -version = "0.4.5" -rust-version = "1.77" # MSRV:1 +version = "0.4.9" +rust-version = "1.81" # MSRV:1 edition.workspace = true license.workspace = true homepage.workspace = true diff --git a/credential/cargo-credential-wincred/README.md b/credential/cargo-credential-wincred/README.md index 1995e9d76cf..0cb6b951270 100644 --- a/credential/cargo-credential-wincred/README.md +++ b/credential/cargo-credential-wincred/README.md @@ -5,5 +5,9 @@ See the [credential-provider] documentation for how to use this. This credential provider is built-in to cargo as `cargo:wincred`. +> This crate is maintained by the Cargo team, primarily for use by Cargo +> and not intended for external use (except as a transitive dependency). This +> crate may make major changes to its APIs or be deprecated without warning. 
+ [Windows Credential Manager]: https://support.microsoft.com/en-us/windows/accessing-credential-manager-1b5c916a-6a16-889f-8581-fc16e8165ac0 [credential-provider]: https://doc.rust-lang.org/nightly/cargo/reference/registry-authentication.html diff --git a/credential/cargo-credential-wincred/src/lib.rs b/credential/cargo-credential-wincred/src/lib.rs index 24b072ee28a..eb6ea390bfc 100644 --- a/credential/cargo-credential-wincred/src/lib.rs +++ b/credential/cargo-credential-wincred/src/lib.rs @@ -1,4 +1,8 @@ //! Cargo registry windows credential process. +//! +//! > This crate is maintained by the Cargo team, primarily for use by Cargo +//! > and not intended for external use (except as a transitive dependency). This +//! > crate may make major changes to its APIs or be deprecated without warning. #[cfg(windows)] mod win { diff --git a/credential/cargo-credential/Cargo.toml b/credential/cargo-credential/Cargo.toml index 4ed0d325352..b3da12d3938 100644 --- a/credential/cargo-credential/Cargo.toml +++ b/credential/cargo-credential/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "cargo-credential" -version = "0.4.5" +version = "0.4.7" rust-version.workspace = true edition.workspace = true license.workspace = true diff --git a/credential/cargo-credential/README.md b/credential/cargo-credential/README.md index d87d41bb8c8..8e4c9f94e74 100644 --- a/credential/cargo-credential/README.md +++ b/credential/cargo-credential/README.md @@ -10,6 +10,9 @@ https://doc.rust-lang.org/nightly/cargo/reference/credential-provider-protocol.h Example implementations may be found at https://github.com/rust-lang/cargo/tree/master/credential +> This crate is maintained by the Cargo team for use by the wider +> ecosystem. This crate follows semver compatibility for its APIs. 
+ ## Usage Create a Cargo project with this as a dependency: diff --git a/credential/cargo-credential/src/lib.rs b/credential/cargo-credential/src/lib.rs index 0888fb402f7..c8e64b983af 100644 --- a/credential/cargo-credential/src/lib.rs +++ b/credential/cargo-credential/src/lib.rs @@ -12,6 +12,9 @@ //! While in the `perform` function, stdin and stdout will be re-attached to the //! active console. This allows credential providers to be interactive if necessary. //! +//! > This crate is maintained by the Cargo team for use by the wider +//! > ecosystem. This crate follows semver compatibility for its APIs. +//! //! ## Error handling //! ### [`Error::UrlNotSupported`] //! A credential provider may only support some registry URLs. If this is the case diff --git a/credential/cargo-credential/src/stdio.rs b/credential/cargo-credential/src/stdio.rs index 25435056f8a..5aa2e4e391c 100644 --- a/credential/cargo-credential/src/stdio.rs +++ b/credential/cargo-credential/src/stdio.rs @@ -143,7 +143,7 @@ mod test { #[test] fn stdin() { - let tempdir = snapbox::path::PathFixture::mutable_temp().unwrap(); + let tempdir = snapbox::dir::DirRoot::mutable_temp().unwrap(); let file = tempdir.path().unwrap().join("stdin"); let mut file = OpenOptions::new() .read(true) diff --git a/credential/cargo-credential/tests/examples.rs b/credential/cargo-credential/tests/examples.rs index 9c2604d1c0f..6844642baef 100644 --- a/credential/cargo-credential/tests/examples.rs +++ b/credential/cargo-credential/tests/examples.rs @@ -1,6 +1,7 @@ use std::path::Path; use snapbox::cmd::Command; +use snapbox::prelude::*; #[test] fn stdout_redirected() { @@ -14,8 +15,8 @@ fn stdout_redirected() { .stdin(format!("{get_request}\n")) .arg("--cargo-plugin") .assert() - .stdout_eq(format!("{hello}\n{err_not_supported}\n")) - .stderr_eq("message on stderr should be sent to the parent process\n") + .stdout_eq(format!("{hello}\n{err_not_supported}\n").raw()) + .stderr_eq("message on stderr should be sent to the 
parent process\n".raw()) .success(); } @@ -38,8 +39,8 @@ fn file_provider() { .stdin(format!("{login_request}\n{get_request}\n")) .arg("--cargo-plugin") .assert() - .stdout_eq(format!("{hello}\n{login_response}\n{get_response}\n")) - .stderr_eq("") + .stdout_eq(format!("{hello}\n{login_response}\n{get_response}\n").raw()) + .stderr_eq("".raw()) .success(); std::fs::remove_dir_all(&dir).unwrap(); } diff --git a/deny.toml b/deny.toml index 746113f3cae..56513f0d871 100644 --- a/deny.toml +++ b/deny.toml @@ -61,16 +61,7 @@ feature-depth = 1 db-path = "~/.cargo/advisory-db" # The url(s) of the advisory databases to use db-urls = ["https://github.com/rustsec/advisory-db"] -# The lint level for security vulnerabilities -vulnerability = "deny" -# The lint level for unmaintained crates -unmaintained = "warn" -# The lint level for crates that have been yanked from their source registry yanked = "warn" -# The lint level for crates with security notices. Note that as of -# 2019-12-17 there are no security notice advisories in -# https://github.com/rustsec/advisory-db -notice = "warn" # A list of advisory IDs to ignore. Note that ignored advisories will still # output a note when they are encountered. ignore = [ @@ -96,8 +87,6 @@ ignore = [ # More documentation for the licenses section can be found here: # https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html [licenses] -# The lint level for crates which do not have a detectable license -unlicensed = "deny" # List of explicitly allowed licenses # See https://spdx.org/licenses/ for list of possible licenses # [possible values: any SPDX 3.11 short identifier (+ optional exception)]. @@ -105,32 +94,13 @@ allow = [ "MIT", "MIT-0", "Apache-2.0", + "BSD-2-Clause", "BSD-3-Clause", "MPL-2.0", "Unicode-DFS-2016", "CC0-1.0", "ISC", ] -# List of explicitly disallowed licenses -# See https://spdx.org/licenses/ for list of possible licenses -# [possible values: any SPDX 3.11 short identifier (+ optional exception)]. 
-deny = [ - #"Nokia", -] -# Lint level for licenses considered copyleft -copyleft = "warn" -# Blanket approval or denial for OSI-approved or FSF Free/Libre licenses -# * both - The license will be approved if it is both OSI-approved *AND* FSF -# * either - The license will be approved if it is either OSI-approved *OR* FSF -# * osi-only - The license will be approved if is OSI-approved *AND NOT* FSF -# * fsf-only - The license will be approved if is FSF *AND NOT* OSI-approved -# * neither - This predicate is ignored and the default lint level is used -allow-osi-fsf-free = "neither" -# Lint level used when no other predicates are matched -# 1. License isn't in the allow or deny lists -# 2. License isn't copyleft -# 3. License isn't OSI/FSF, or allow-osi-fsf-free = "neither" -default = "deny" # The confidence threshold for detecting a license from license text. # The higher the value, the more closely the license text must be to the # canonical license text of a valid SPDX license file. diff --git a/publish.py b/publish.py index 2918e1d998a..f01cbc666c0 100755 --- a/publish.py +++ b/publish.py @@ -15,6 +15,9 @@ from urllib.error import HTTPError +# Whenever you add a new crate to this list that does NOT start with "cargo-" +# you must reach out to the infra team to add the crate to the list of crates +# allowed to be published from the "cargo CI" crates.io token. 
TO_PUBLISH = [ 'credential/cargo-credential', 'credential/cargo-credential-libsecret', diff --git a/src/bin/cargo/cli.rs b/src/bin/cargo/cli.rs index f4f04bb4f5e..aa72d7c1a3b 100644 --- a/src/bin/cargo/cli.rs +++ b/src/bin/cargo/cli.rs @@ -356,7 +356,13 @@ For more information, see issue #12207 cargo help <>' for more information on a sp .action(ArgAction::SetTrue) .global(true) .hide(true)) - .arg(multi_opt("config", "KEY=VALUE", "Override a configuration value").global(true)) + .arg(multi_opt("config", "KEY=VALUE|PATH", "Override a configuration value").global(true)) // Better suggestion for the unsupported lowercase unstable feature flag. .arg( Arg::new("unsupported-lowercase-unstable-feature-flag") .help("") @@ -676,7 +682,15 @@ See 'cargo help <>' for more information on a sp .short('Z') .value_name("FLAG") .action(ArgAction::Append) - .global(true)) + .global(true) + .add(clap_complete::ArgValueCandidates::new(|| { + let flags = CliUnstable::help(); + flags.into_iter().map(|flag| { + clap_complete::CompletionCandidate::new(flag.0.replace("_", "-")).help(flag.1.map(|help| { + help.into() + })) + }).collect() + }))) .subcommands(commands::builtin()) } diff --git a/src/bin/cargo/commands/add.rs b/src/bin/cargo/commands/add.rs index 9c67eb8343c..263b29b1fac 100644 --- a/src/bin/cargo/commands/add.rs +++ b/src/bin/cargo/commands/add.rs @@ -87,6 +87,7 @@ Example uses: - Depend on crates with the same name from different registries"), ]) .arg_manifest_path_without_unsupported_path_tip() + .arg_lockfile_path() .arg_package("Package to modify") .arg_ignore_rust_version() .arg_dry_run("Don't actually write the manifest") @@ -100,6 +101,12 @@ Example uses: .help("Filesystem path to local crate to add") .group("selected") .conflicts_with("git"), + clap::Arg::new("base") + .long("base") + .action(ArgAction::Set) + .value_name("BASE") + .help("The path base to use when adding from a local crate (unstable).") + .requires("path"), clap::Arg::new("git") .long("git") 
.action(ArgAction::Set) @@ -137,7 +144,11 @@ This is the catch all, handling hashes to named references in remote repositorie .long("registry") .action(ArgAction::Set) .value_name("NAME") - .help("Package registry for this dependency"), + .help("Package registry for this dependency") + .add(clap_complete::ArgValueCandidates::new(|| { + let candidates = get_registry_candidates(); + candidates.unwrap_or_default() + })), ]) .next_help_heading("Section") .args([ @@ -214,17 +225,16 @@ pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { }; add(&ws, &options)?; - if !dry_run { - // Reload the workspace since we've changed dependencies - let ws = args.workspace(gctx)?; - resolve_ws(&ws)?; - } + // Reload the workspace since we've changed dependencies + let ws = args.workspace(gctx)?; + resolve_ws(&ws, dry_run)?; Ok(()) } fn parse_dependencies(gctx: &GlobalContext, matches: &ArgMatches) -> CargoResult> { let path = matches.get_one::("path"); + let base = matches.get_one::("base"); let git = matches.get_one::("git"); let branch = matches.get_one::("branch"); let rev = matches.get_one::("rev"); @@ -330,6 +340,7 @@ fn parse_dependencies(gctx: &GlobalContext, matches: &ArgMatches) -> CargoResult public, registry: registry.clone(), path: path.map(String::from), + base: base.map(String::from), git: git.map(String::from), branch: branch.map(String::from), rev: rev.map(String::from), diff --git a/src/bin/cargo/commands/bench.rs b/src/bin/cargo/commands/bench.rs index f6d8766570d..1f4c8df80ea 100644 --- a/src/bin/cargo/commands/bench.rs +++ b/src/bin/cargo/commands/bench.rs @@ -50,6 +50,7 @@ pub fn cli() -> Command { .arg_unit_graph() .arg_timings() .arg_manifest_path() + .arg_lockfile_path() .arg_ignore_rust_version() .after_help(color_print::cstr!( "Run `cargo help bench` for more detailed information.\n" @@ -63,7 +64,7 @@ pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { args.compile_options(gctx, CompileMode::Bench, Some(&ws), 
ProfileChecking::Custom)?; compile_opts.build_config.requested_profile = - args.get_profile_name(gctx, "bench", ProfileChecking::Custom)?; + args.get_profile_name("bench", ProfileChecking::Custom)?; let ops = TestOptions { no_run: args.flag("no-run"), diff --git a/src/bin/cargo/commands/build.rs b/src/bin/cargo/commands/build.rs index 308ce2ce6a7..86d477ccac2 100644 --- a/src/bin/cargo/commands/build.rs +++ b/src/bin/cargo/commands/build.rs @@ -34,11 +34,12 @@ pub fn cli() -> Command { .arg_parallel() .arg_target_triple("Build for the target triple") .arg_target_dir() - .arg_out_dir() + .arg_artifact_dir() .arg_build_plan() .arg_unit_graph() .arg_timings() .arg_manifest_path() + .arg_lockfile_path() .arg_ignore_rust_version() .after_help(color_print::cstr!( "Run `cargo help build` for more detailed information.\n" @@ -50,15 +51,32 @@ pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { let mut compile_opts = args.compile_options(gctx, CompileMode::Build, Some(&ws), ProfileChecking::Custom)?; - if let Some(out_dir) = args.value_of_path("out-dir", gctx) { - compile_opts.build_config.export_dir = Some(out_dir); - } else if let Some(out_dir) = gctx.build_config()?.out_dir.as_ref() { - let out_dir = out_dir.resolve_path(gctx); - compile_opts.build_config.export_dir = Some(out_dir); + if let Some(artifact_dir) = args.value_of_path("artifact-dir", gctx) { + // If the user specifies `--artifact-dir`, use that + compile_opts.build_config.export_dir = Some(artifact_dir); + } else if let Some(artifact_dir) = args.value_of_path("out-dir", gctx) { + // `--out-dir` is deprecated, but still supported for now + gctx.shell() + .warn("the --out-dir flag has been changed to --artifact-dir")?; + compile_opts.build_config.export_dir = Some(artifact_dir); + } else if let Some(artifact_dir) = gctx.build_config()?.artifact_dir.as_ref() { + // If a CLI option is not specified for choosing the artifact dir, use the `artifact-dir` from the build config, if + // present + 
let artifact_dir = artifact_dir.resolve_path(gctx); + compile_opts.build_config.export_dir = Some(artifact_dir); + } else if let Some(artifact_dir) = gctx.build_config()?.out_dir.as_ref() { + // As a last priority, check `out-dir` in the build config + gctx.shell() + .warn("the out-dir config option has been changed to artifact-dir")?; + let artifact_dir = artifact_dir.resolve_path(gctx); + compile_opts.build_config.export_dir = Some(artifact_dir); } + if compile_opts.build_config.export_dir.is_some() { - gctx.cli_unstable().fail_if_stable_opt("--out-dir", 6790)?; + gctx.cli_unstable() + .fail_if_stable_opt("--artifact-dir", 6790)?; } + ops::compile(&ws, &compile_opts)?; Ok(()) } diff --git a/src/bin/cargo/commands/check.rs b/src/bin/cargo/commands/check.rs index 56f274effba..66f378c3e90 100644 --- a/src/bin/cargo/commands/check.rs +++ b/src/bin/cargo/commands/check.rs @@ -36,6 +36,7 @@ pub fn cli() -> Command { .arg_unit_graph() .arg_timings() .arg_manifest_path() + .arg_lockfile_path() .arg_ignore_rust_version() .after_help(color_print::cstr!( "Run `cargo help check` for more detailed information.\n" diff --git a/src/bin/cargo/commands/clean.rs b/src/bin/cargo/commands/clean.rs index e358b967150..d9414b4d17d 100644 --- a/src/bin/cargo/commands/clean.rs +++ b/src/bin/cargo/commands/clean.rs @@ -19,6 +19,7 @@ pub fn cli() -> Command { .arg_target_triple("Target triple to clean output for") .arg_target_dir() .arg_manifest_path() + .arg_lockfile_path() .arg_dry_run("Display what would be deleted without deleting anything") .args_conflicts_with_subcommands(true) .subcommand( @@ -146,7 +147,7 @@ pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { gctx, spec: values(args, "package"), targets: args.targets()?, - requested_profile: args.get_profile_name(gctx, "dev", ProfileChecking::Custom)?, + requested_profile: args.get_profile_name("dev", ProfileChecking::Custom)?, profile_specified: args.contains_id("profile") || args.flag("release"), doc: 
args.flag("doc"), dry_run: args.dry_run(), diff --git a/src/bin/cargo/commands/doc.rs b/src/bin/cargo/commands/doc.rs index 2603b3cb777..6707364d946 100644 --- a/src/bin/cargo/commands/doc.rs +++ b/src/bin/cargo/commands/doc.rs @@ -39,6 +39,7 @@ pub fn cli() -> Command { .arg_unit_graph() .arg_timings() .arg_manifest_path() + .arg_lockfile_path() .arg_ignore_rust_version() .after_help(color_print::cstr!( "Run `cargo help doc` for more detailed information.\n" diff --git a/src/bin/cargo/commands/fetch.rs b/src/bin/cargo/commands/fetch.rs index f60ed61b854..2fdba80baf8 100644 --- a/src/bin/cargo/commands/fetch.rs +++ b/src/bin/cargo/commands/fetch.rs @@ -9,6 +9,7 @@ pub fn cli() -> Command { .arg_silent_suggestion() .arg_target_triple("Fetch dependencies for the target triple") .arg_manifest_path() + .arg_lockfile_path() .after_help(color_print::cstr!( "Run `cargo help fetch` for more detailed information.\n" )) diff --git a/src/bin/cargo/commands/fix.rs b/src/bin/cargo/commands/fix.rs index 8190cf07e95..e44980d1330 100644 --- a/src/bin/cargo/commands/fix.rs +++ b/src/bin/cargo/commands/fix.rs @@ -1,6 +1,6 @@ use crate::command_prelude::*; - use cargo::core::Workspace; + use cargo::ops; pub fn cli() -> Command { @@ -54,6 +54,7 @@ pub fn cli() -> Command { .arg_target_dir() .arg_timings() .arg_manifest_path() + .arg_lockfile_path() .arg_ignore_rust_version() .after_help(color_print::cstr!( "Run `cargo help fix` for more detailed information.\n" @@ -71,8 +72,13 @@ pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { // Unlike other commands default `cargo fix` to all targets to fix as much // code as we can. 
let root_manifest = args.root_manifest(gctx)?; + + // Don't use args.workspace() here: it would honor -Zavoid-dev-deps (if passed), and fix needs the full dependency set let mut ws = Workspace::new(&root_manifest, gctx)?; ws.set_resolve_honors_rust_version(args.honor_rust_version()); + let lockfile_path = args.lockfile_path(gctx)?; + ws.set_requested_lockfile_path(lockfile_path.clone()); + let mut opts = args.compile_options(gctx, mode, Some(&ws), ProfileChecking::LegacyTestOnly)?; if !opts.filter.is_specific() { @@ -92,6 +98,7 @@ pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { allow_no_vcs: args.flag("allow-no-vcs"), allow_staged: args.flag("allow-staged"), broken_code: args.flag("broken-code"), + requested_lockfile_path: lockfile_path, }, )?; Ok(()) diff --git a/src/bin/cargo/commands/generate_lockfile.rs b/src/bin/cargo/commands/generate_lockfile.rs index a2ddac61dd9..3ad858daaa7 100644 --- a/src/bin/cargo/commands/generate_lockfile.rs +++ b/src/bin/cargo/commands/generate_lockfile.rs @@ -7,6 +7,7 @@ pub fn cli() -> Command { .about("Generate the lockfile for a package") .arg_silent_suggestion() .arg_manifest_path() + .arg_lockfile_path() .arg_ignore_rust_version_with_help( "Ignore `rust-version` specification in packages (unstable)", ) diff --git a/src/bin/cargo/commands/help.rs b/src/bin/cargo/commands/help.rs index a92f5d140bc..f9f91cf7249 100644 --- a/src/bin/cargo/commands/help.rs +++ b/src/bin/cargo/commands/help.rs @@ -15,7 +15,19 @@ const COMPRESSED_MAN: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/man.tgz" pub fn cli() -> Command { subcommand("help") .about("Displays help for a cargo subcommand") - .arg(Arg::new("COMMAND").action(ArgAction::Set)) + .arg(Arg::new("COMMAND").action(ArgAction::Set).add( + clap_complete::ArgValueCandidates::new(|| { + super::builtin() + .iter() + .map(|cmd| { + let name = cmd.get_name(); + clap_complete::CompletionCandidate::new(name) + .help(cmd.get_about().cloned()) + .hide(cmd.is_hide_set()) + }) + .collect() + }), + )) } pub fn exec(gctx: 
&mut GlobalContext, args: &ArgMatches) -> CliResult { diff --git a/src/bin/cargo/commands/info.rs b/src/bin/cargo/commands/info.rs new file mode 100644 index 00000000000..065fcbcd9aa --- /dev/null +++ b/src/bin/cargo/commands/info.rs @@ -0,0 +1,35 @@ +use anyhow::Context; +use cargo::ops::info; +use cargo::util::command_prelude::*; +use cargo_util_schemas::core::PackageIdSpec; + +pub fn cli() -> Command { + Command::new("info") + .about("Display information about a package in the registry") + .arg( + Arg::new("package") + .required(true) + .value_name("SPEC") + .help_heading(heading::PACKAGE_SELECTION) + .help("Package to inspect"), + ) + .arg_index("Registry index URL to search packages in") + .arg_registry("Registry to search packages in") + .arg_silent_suggestion() + .after_help(color_print::cstr!( + "Run `cargo help info` for more detailed information.\n" + )) +} + +pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + let package = args + .get_one::("package") + .map(String::as_str) + .unwrap(); + let spec = PackageIdSpec::parse(package) + .with_context(|| format!("invalid package ID specification: `{package}`"))?; + + let reg_or_index = args.registry_or_index(gctx)?; + info(&spec, gctx, reg_or_index)?; + Ok(()) +} diff --git a/src/bin/cargo/commands/install.rs b/src/bin/cargo/commands/install.rs index e188d6a0d67..2b0a77feac9 100644 --- a/src/bin/cargo/commands/install.rs +++ b/src/bin/cargo/commands/install.rs @@ -69,6 +69,7 @@ pub fn cli() -> Command { ) .arg(opt("root", "Directory to install packages into").value_name("DIR")) .arg(flag("force", "Force overwriting existing crates or binaries").short('f')) + .arg_dry_run("Perform all checks without installing (unstable)") .arg(flag("no-track", "Do not save tracking information")) .arg(flag( "list", @@ -85,15 +86,19 @@ pub fn cli() -> Command { ) .arg_features() .arg_parallel() - .arg(flag( - "debug", - "Build in debug mode (with the 'dev' profile) instead of release mode", - )) + .arg( + 
flag( + "debug", + "Build in debug mode (with the 'dev' profile) instead of release mode", + ) + .conflicts_with("profile"), + ) .arg_redundant_default_mode("release", "install", "debug") .arg_profile("Install artifacts with the specified profile") .arg_target_triple("Build for the target triple") .arg_target_dir() .arg_timings() + .arg_lockfile_path() .after_help(color_print::cstr!( "Run `cargo help install` for more detailed information.\n" )) @@ -196,7 +201,16 @@ pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { )?; compile_opts.build_config.requested_profile = - args.get_profile_name(gctx, "release", ProfileChecking::Custom)?; + args.get_profile_name("release", ProfileChecking::Custom)?; + if args.dry_run() { + gctx.cli_unstable().fail_if_stable_opt("--dry-run", 11123)?; + } + + let requested_lockfile_path = args.lockfile_path(gctx)?; + // 14421: lockfile path should imply --locked on running `install` + if requested_lockfile_path.is_some() { + gctx.set_locked(true); + } if args.flag("list") { ops::install_list(root, gctx)?; @@ -210,6 +224,8 @@ pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { &compile_opts, args.flag("force"), args.flag("no-track"), + args.dry_run(), + requested_lockfile_path.as_deref(), )?; } Ok(()) diff --git a/src/bin/cargo/commands/metadata.rs b/src/bin/cargo/commands/metadata.rs index 83232ef47f2..0b98d50a114 100644 --- a/src/bin/cargo/commands/metadata.rs +++ b/src/bin/cargo/commands/metadata.rs @@ -1,6 +1,7 @@ -use crate::command_prelude::*; use cargo::ops::{self, OutputMetadataOptions}; +use crate::command_prelude::*; + pub fn cli() -> Command { subcommand("metadata") .about( @@ -26,6 +27,7 @@ pub fn cli() -> Command { .arg_silent_suggestion() .arg_features() .arg_manifest_path() + .arg_lockfile_path() .after_help(color_print::cstr!( "Run `cargo help metadata` for more detailed information.\n" )) diff --git a/src/bin/cargo/commands/mod.rs b/src/bin/cargo/commands/mod.rs index 
02c3438dc47..b507226f3a9 100644 --- a/src/bin/cargo/commands/mod.rs +++ b/src/bin/cargo/commands/mod.rs @@ -14,6 +14,7 @@ pub fn builtin() -> Vec { generate_lockfile::cli(), git_checkout::cli(), help::cli(), + info::cli(), init::cli(), install::cli(), locate_project::cli(), @@ -59,6 +60,7 @@ pub fn builtin_exec(cmd: &str) -> Option { "generate-lockfile" => generate_lockfile::exec, "git-checkout" => git_checkout::exec, "help" => help::exec, + "info" => info::exec, "init" => init::exec, "install" => install::exec, "locate-project" => locate_project::exec, @@ -102,6 +104,7 @@ pub mod fix; pub mod generate_lockfile; pub mod git_checkout; pub mod help; +pub mod info; pub mod init; pub mod install; pub mod locate_project; diff --git a/src/bin/cargo/commands/package.rs b/src/bin/cargo/commands/package.rs index 27b48097c6a..251fa286ec5 100644 --- a/src/bin/cargo/commands/package.rs +++ b/src/bin/cargo/commands/package.rs @@ -5,6 +5,8 @@ use cargo::ops::{self, PackageOpts}; pub fn cli() -> Command { subcommand("package") .about("Assemble the local package into a distributable tarball") + .arg_index("Registry index URL to prepare the package for (unstable)") + .arg_registry("Registry to prepare the package for (unstable)") .arg( flag( "list", @@ -35,12 +37,30 @@ pub fn cli() -> Command { .arg_target_dir() .arg_parallel() .arg_manifest_path() + .arg_lockfile_path() .after_help(color_print::cstr!( "Run `cargo help package` for more detailed information.\n" )) } pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { + if args._value_of("registry").is_some() { + gctx.cli_unstable().fail_if_stable_opt_custom_z( + "--registry", + 13947, + "package-workspace", + gctx.cli_unstable().package_workspace, + )?; + } + if args._value_of("index").is_some() { + gctx.cli_unstable().fail_if_stable_opt_custom_z( + "--index", + 13947, + "package-workspace", + gctx.cli_unstable().package_workspace, + )?; + } + let reg_or_index = args.registry_or_index(gctx)?; let ws = 
args.workspace(gctx)?; if ws.root_maybe().is_embedded() { return Err(anyhow::format_err!( @@ -64,6 +84,7 @@ pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { jobs: args.jobs()?, keep_going: args.keep_going(), cli_features: args.cli_features()?, + reg_or_index, }, )?; diff --git a/src/bin/cargo/commands/pkgid.rs b/src/bin/cargo/commands/pkgid.rs index 72abbfc0788..5fcf85b8fd9 100644 --- a/src/bin/cargo/commands/pkgid.rs +++ b/src/bin/cargo/commands/pkgid.rs @@ -10,6 +10,7 @@ pub fn cli() -> Command { .arg_silent_suggestion() .arg_package("Argument to get the package ID specifier for") .arg_manifest_path() + .arg_lockfile_path() .after_help(color_print::cstr!( "Run `cargo help pkgid` for more detailed information.\n" )) diff --git a/src/bin/cargo/commands/publish.rs b/src/bin/cargo/commands/publish.rs index 3b497e1ed12..df1c4654ffe 100644 --- a/src/bin/cargo/commands/publish.rs +++ b/src/bin/cargo/commands/publish.rs @@ -24,6 +24,7 @@ pub fn cli() -> Command { .arg_target_triple("Build for the target triple") .arg_target_dir() .arg_manifest_path() + .arg_lockfile_path() .after_help(color_print::cstr!( "Run `cargo help publish` for more detailed information.\n" )) diff --git a/src/bin/cargo/commands/read_manifest.rs b/src/bin/cargo/commands/read_manifest.rs index b86bbf795bc..692b79d1cbb 100644 --- a/src/bin/cargo/commands/read_manifest.rs +++ b/src/bin/cargo/commands/read_manifest.rs @@ -15,6 +15,9 @@ Deprecated, use `cargo metadata --no-deps` instead.\ pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { let ws = args.workspace(gctx)?; - gctx.shell().print_json(&ws.current()?.serialized())?; + gctx.shell().print_json( + &ws.current()? 
+ .serialized(gctx.cli_unstable(), ws.unstable_features()), + )?; Ok(()) } diff --git a/src/bin/cargo/commands/remove.rs b/src/bin/cargo/commands/remove.rs index 25179487c93..833fd00c549 100644 --- a/src/bin/cargo/commands/remove.rs +++ b/src/bin/cargo/commands/remove.rs @@ -51,6 +51,7 @@ pub fn cli() -> clap::Command { ]) .arg_package("Package to remove from") .arg_manifest_path() + .arg_lockfile_path() .after_help(color_print::cstr!( "Run `cargo help remove` for more detailed information.\n" )) @@ -121,7 +122,7 @@ pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { ws.gctx() .shell() .set_verbosity(cargo::core::Verbosity::Quiet); - let resolve = resolve_ws(&ws); + let resolve = resolve_ws(&ws, dry_run); ws.gctx().shell().set_verbosity(verbosity); resolve?.1 }; @@ -129,7 +130,7 @@ pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { // Attempt to gc unused patches and re-resolve if anything is removed if gc_unused_patches(&workspace, &resolve)? { let ws = args.workspace(gctx)?; - resolve_ws(&ws)?; + resolve_ws(&ws, dry_run)?; } } Ok(()) @@ -166,20 +167,37 @@ fn gc_workspace(workspace: &Workspace<'_>) -> CargoResult<()> { let members = workspace .members() - .map(|p| LocalManifest::try_new(p.manifest_path())) + .map(|p| { + Ok(( + LocalManifest::try_new(p.manifest_path())?, + p.manifest().unstable_features(), + )) + }) .collect::>>()?; let mut dependencies = members - .iter() - .flat_map(|manifest| { - manifest.get_sections().into_iter().flat_map(|(_, table)| { - table - .as_table_like() - .unwrap() - .iter() - .map(|(key, item)| Dependency::from_toml(&manifest.path, key, item)) - .collect::>() - }) + .into_iter() + .flat_map(|(manifest, unstable_features)| { + manifest + .get_sections() + .into_iter() + .flat_map(move |(_, table)| { + table + .as_table_like() + .unwrap() + .iter() + .map(|(key, item)| { + Dependency::from_toml( + workspace.gctx(), + workspace.root(), + &manifest.path, + &unstable_features, + key, + item, + ) 
+ }) + .collect::>() + }) }) .collect::>>()?; @@ -191,7 +209,14 @@ fn gc_workspace(workspace: &Workspace<'_>) -> CargoResult<()> { { deps_table.set_implicit(true); for (key, item) in deps_table.iter_mut() { - let ws_dep = Dependency::from_toml(&workspace.root(), key.get(), item)?; + let ws_dep = Dependency::from_toml( + workspace.gctx(), + workspace.root(), + &workspace.root(), + workspace.unstable_features(), + key.get(), + item, + )?; // search for uses of this workspace dependency let mut is_used = false; @@ -328,7 +353,14 @@ fn gc_unused_patches(workspace: &Workspace<'_>, resolve: &Resolve) -> CargoResul patch_table.set_implicit(true); for (key, item) in patch_table.iter_mut() { - let dep = Dependency::from_toml(&workspace.root_manifest(), key.get(), item)?; + let dep = Dependency::from_toml( + workspace.gctx(), + workspace.root(), + &workspace.root_manifest(), + workspace.unstable_features(), + key.get(), + item, + )?; // Generate a PackageIdSpec url for querying let url = if let MaybeWorkspace::Other(source_id) = diff --git a/src/bin/cargo/commands/run.rs b/src/bin/cargo/commands/run.rs index 74eb1450bd0..c9e59770a28 100644 --- a/src/bin/cargo/commands/run.rs +++ b/src/bin/cargo/commands/run.rs @@ -38,6 +38,7 @@ pub fn cli() -> Command { .arg_target_triple("Build for the target triple") .arg_target_dir() .arg_manifest_path() + .arg_lockfile_path() .arg_ignore_rust_version() .arg_unit_graph() .arg_timings() diff --git a/src/bin/cargo/commands/rustc.rs b/src/bin/cargo/commands/rustc.rs index 2f52c6b5926..454282f0e6e 100644 --- a/src/bin/cargo/commands/rustc.rs +++ b/src/bin/cargo/commands/rustc.rs @@ -52,6 +52,7 @@ pub fn cli() -> Command { .arg_unit_graph() .arg_timings() .arg_manifest_path() + .arg_lockfile_path() .arg_ignore_rust_version() .after_help(color_print::cstr!( "Run `cargo help rustc` for more detailed information.\n" @@ -90,7 +91,16 @@ pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { ops::print(&ws, &compile_opts, 
opt_value)?; return Ok(()); } - let crate_types = values(args, CRATE_TYPE_ARG_NAME); + + let crate_types = args + .get_many::(CRATE_TYPE_ARG_NAME) + .into_iter() + .flatten() + .flat_map(|s| s.split(',')) + .filter(|s| !s.is_empty()) + .map(String::from) + .collect::>(); + compile_opts.target_rustc_crate_types = if crate_types.is_empty() { None } else { diff --git a/src/bin/cargo/commands/rustdoc.rs b/src/bin/cargo/commands/rustdoc.rs index 6535ca405c6..f9c290bf5a0 100644 --- a/src/bin/cargo/commands/rustdoc.rs +++ b/src/bin/cargo/commands/rustdoc.rs @@ -45,6 +45,7 @@ pub fn cli() -> Command { .arg_unit_graph() .arg_timings() .arg_manifest_path() + .arg_lockfile_path() .arg_ignore_rust_version() .after_help(color_print::cstr!( "Run `cargo help rustdoc` for more detailed information.\n" diff --git a/src/bin/cargo/commands/test.rs b/src/bin/cargo/commands/test.rs index 1815ee62f9f..73c3505100c 100644 --- a/src/bin/cargo/commands/test.rs +++ b/src/bin/cargo/commands/test.rs @@ -60,6 +60,7 @@ pub fn cli() -> Command { .arg_unit_graph() .arg_timings() .arg_manifest_path() + .arg_lockfile_path() .arg_ignore_rust_version() .after_help(color_print::cstr!( "Run `cargo help test` for more detailed information.\n\ @@ -74,7 +75,7 @@ pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { args.compile_options(gctx, CompileMode::Test, Some(&ws), ProfileChecking::Custom)?; compile_opts.build_config.requested_profile = - args.get_profile_name(gctx, "test", ProfileChecking::Custom)?; + args.get_profile_name("test", ProfileChecking::Custom)?; // `TESTNAME` is actually an argument of the test binary, but it's // important, so we explicitly mention it and reconfigure. 
diff --git a/src/bin/cargo/commands/tree.rs b/src/bin/cargo/commands/tree.rs index 6d83f8e8e95..b0f35370ebc 100644 --- a/src/bin/cargo/commands/tree.rs +++ b/src/bin/cargo/commands/tree.rs @@ -95,6 +95,7 @@ pub fn cli() -> Command { Pass `all` to include all targets.", ) .arg_manifest_path() + .arg_lockfile_path() .after_help(color_print::cstr!( "Run `cargo help tree` for more detailed information.\n" )) diff --git a/src/bin/cargo/commands/uninstall.rs b/src/bin/cargo/commands/uninstall.rs index cad538de9c0..d50996d9af6 100644 --- a/src/bin/cargo/commands/uninstall.rs +++ b/src/bin/cargo/commands/uninstall.rs @@ -5,7 +5,14 @@ use cargo::ops; pub fn cli() -> Command { subcommand("uninstall") .about("Remove a Rust binary") - .arg(Arg::new("spec").value_name("SPEC").num_args(0..)) + .arg( + Arg::new("spec") + .value_name("SPEC") + .num_args(0..) + .add::(clap_complete::ArgValueCandidates::new( + || get_installed_crates(), + )), + ) .arg(opt("root", "Directory to uninstall packages from").value_name("DIR")) .arg_silent_suggestion() .arg_package_spec_simple("Package to uninstall") @@ -37,3 +44,25 @@ pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { ops::uninstall(root, specs, &values(args, "bin"), gctx)?; Ok(()) } + +fn get_installed_crates() -> Vec { + get_installed_crates_().unwrap_or_default() +} + +fn get_installed_crates_() -> Option> { + let mut candidates = Vec::new(); + + let gctx = GlobalContext::default().ok()?; + + let root = ops::resolve_root(None, &gctx).ok()?; + + let tracker = ops::InstallTracker::load(&gctx, &root).ok()?; + + for (_, v) in tracker.all_installed_bins() { + for bin in v { + candidates.push(clap_complete::CompletionCandidate::new(bin)); + } + } + + Some(candidates) +} diff --git a/src/bin/cargo/commands/update.rs b/src/bin/cargo/commands/update.rs index fb394e4aa33..a1733a50487 100644 --- a/src/bin/cargo/commands/update.rs +++ b/src/bin/cargo/commands/update.rs @@ -13,7 +13,10 @@ pub fn cli() -> Command { 
.value_name("SPEC") .help_heading(heading::PACKAGE_SELECTION) .group("package-group") - .help("Package to update")]) + .help("Package to update") + .add(clap_complete::ArgValueCandidates::new( + get_pkg_id_spec_candidates, + ))]) .arg( optional_multi_opt("package", "SPEC", "Package to update") .short('p') @@ -35,6 +38,13 @@ pub fn cli() -> Command { .value_name("PRECISE") .requires("package-group"), ) + .arg( + flag( + "breaking", + "Update [SPEC] to latest SemVer-breaking version (unstable)", + ) + .short('b'), + ) .arg_silent_suggestion() .arg( flag("workspace", "Only update the workspace packages") @@ -42,6 +52,7 @@ pub fn cli() -> Command { .help_heading(heading::PACKAGE_SELECTION), ) .arg_manifest_path() + .arg_lockfile_path() .arg_ignore_rust_version_with_help( "Ignore `rust-version` specification in packages (unstable)", ) @@ -59,7 +70,8 @@ pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { gctx.cli_unstable().msrv_policy, )?; } - let ws = args.workspace(gctx)?; + + let mut ws = args.workspace(gctx)?; if args.is_present_with_zero_values("package") { print_available_packages(&ws)?; @@ -89,6 +101,24 @@ pub fn exec(gctx: &mut GlobalContext, args: &ArgMatches) -> CliResult { workspace: args.flag("workspace"), gctx, }; - ops::update_lockfile(&ws, &update_opts)?; + + if args.flag("breaking") { + gctx.cli_unstable() + .fail_if_stable_opt("--breaking", 12425)?; + + let upgrades = ops::upgrade_manifests(&mut ws, &update_opts.to_update)?; + ops::resolve_ws(&ws, update_opts.dry_run)?; + ops::write_manifest_upgrades(&ws, &upgrades, update_opts.dry_run)?; + + if update_opts.dry_run { + update_opts + .gctx + .shell() + .warn("aborting update due to dry run")?; + } + } else { + ops::update_lockfile(&ws, &update_opts)?; + } + Ok(()) } diff --git a/src/bin/cargo/commands/vendor.rs b/src/bin/cargo/commands/vendor.rs index efa1f1bb7b6..96b30676732 100644 --- a/src/bin/cargo/commands/vendor.rs +++ b/src/bin/cargo/commands/vendor.rs @@ -37,6 +37,7 @@ pub fn 
cli() -> Command { .arg(unsupported("only-git-deps")) .arg(unsupported("disallow-duplicates")) .arg_manifest_path() + .arg_lockfile_path() .after_help(color_print::cstr!( "Run `cargo help vendor` for more detailed information.\n" )) diff --git a/src/bin/cargo/main.rs b/src/bin/cargo/main.rs index 5dbf9363460..fa41e9c3ff4 100644 --- a/src/bin/cargo/main.rs +++ b/src/bin/cargo/main.rs @@ -1,5 +1,6 @@ #![allow(clippy::self_named_module_files)] // false positive in `commands/build.rs` +use cargo::core::features; use cargo::core::shell::Shell; use cargo::util::network::http::http_handle; use cargo::util::network::http::needs_custom_http_transport; @@ -28,6 +29,27 @@ fn main() { } }; + let nightly_features_allowed = matches!(&*features::channel(), "nightly" | "dev"); + if nightly_features_allowed { + let _span = tracing::span!(tracing::Level::TRACE, "completions").entered(); + let args = std::env::args_os(); + let current_dir = std::env::current_dir().ok(); + let completer = clap_complete::CompleteEnv::with_factory(|| { + let mut gctx = GlobalContext::default().expect("already loaded without errors"); + cli::cli(&mut gctx) + }) + .var("CARGO_COMPLETE"); + if completer + .try_complete(args, current_dir.as_deref()) + .unwrap_or_else(|e| { + let mut shell = Shell::new(); + cargo::exit_with_error(e.into(), &mut shell) + }) + { + return; + } + } + let result = if let Some(lock_addr) = cargo::ops::fix_get_proxy_lock_addr() { cargo::ops::fix_exec_rustc(&gctx, &lock_addr).map_err(|e| CliError::from(e)) } else { diff --git a/src/cargo/core/compiler/build_config.rs b/src/cargo/core/compiler/build_config.rs index 9536e58be56..4c804f27b68 100644 --- a/src/cargo/core/compiler/build_config.rs +++ b/src/cargo/core/compiler/build_config.rs @@ -31,16 +31,18 @@ pub struct BuildConfig { pub build_plan: bool, /// Output the unit graph to stdout instead of actually compiling. pub unit_graph: bool, + /// `true` to avoid really compiling. 
+ pub dry_run: bool, /// An optional override of the rustc process for primary units pub primary_unit_rustc: Option, /// A thread used by `cargo fix` to receive messages on a socket regarding /// the success/failure of applying fixes. pub rustfix_diagnostic_server: Rc>>, - /// The directory to copy final artifacts to. Note that even if `out_dir` is - /// set, a copy of artifacts still could be found a `target/(debug\release)` - /// as usual. - // Note that, although the cmd-line flag name is `out-dir`, in code we use - // `export_dir`, to avoid confusion with out dir at `target/debug/deps`. + /// The directory to copy final artifacts to. Note that even if + /// `artifact-dir` is set, a copy of artifacts still can be found at + /// `target/(debug|release)` as usual. + /// Named `export_dir` to avoid confusion with + /// `CompilationFiles::artifact_dir`. pub export_dir: Option, /// `true` to output a future incompatibility report at the end of the build pub future_incompat_report: bool, @@ -112,6 +114,7 @@ impl BuildConfig { force_rebuild: false, build_plan: false, unit_graph: false, + dry_run: false, primary_unit_rustc: None, rustfix_diagnostic_server: Rc::new(RefCell::new(None)), export_dir: None, diff --git a/src/cargo/core/compiler/build_context/mod.rs b/src/cargo/core/compiler/build_context/mod.rs index 78c1e90ae5a..1bc25d12c19 100644 --- a/src/cargo/core/compiler/build_context/mod.rs +++ b/src/cargo/core/compiler/build_context/mod.rs @@ -134,32 +134,6 @@ impl<'a, 'gctx> BuildContext<'a, 'gctx> { self.build_config.jobs } - /// Extra compiler flags to pass to `rustc` for a given unit. - /// - /// Although it depends on the caller, in the current Cargo implementation, - /// these flags take precedence over those from [`BuildContext::extra_args_for`]. - /// - /// As of now, these flags come from environment variables and configurations. - /// See [`TargetInfo.rustflags`] for more on how Cargo collects them. 
- /// - /// [`TargetInfo.rustflags`]: TargetInfo::rustflags - pub fn rustflags_args(&self, unit: &Unit) -> &[String] { - &self.target_data.info(unit.kind).rustflags - } - - /// Extra compiler flags to pass to `rustdoc` for a given unit. - /// - /// Although it depends on the caller, in the current Cargo implementation, - /// these flags take precedence over those from [`BuildContext::extra_args_for`]. - /// - /// As of now, these flags come from environment variables and configurations. - /// See [`TargetInfo.rustdocflags`] for more on how Cargo collects them. - /// - /// [`TargetInfo.rustdocflags`]: TargetInfo::rustdocflags - pub fn rustdocflags_args(&self, unit: &Unit) -> &[String] { - &self.target_data.info(unit.kind).rustdocflags - } - /// Extra compiler args for either `rustc` or `rustdoc`. /// /// As of now, these flags come from the trailing args of either diff --git a/src/cargo/core/compiler/build_context/target_info.rs b/src/cargo/core/compiler/build_context/target_info.rs index f80f45c8f58..f36fc173bcc 100644 --- a/src/cargo/core/compiler/build_context/target_info.rs +++ b/src/cargo/core/compiler/build_context/target_info.rs @@ -8,9 +8,7 @@ //! * [`TargetInfo::rustc_outputs`] to get a list of supported file types. use crate::core::compiler::apply_env_config; -use crate::core::compiler::{ - BuildOutput, BuildRunner, CompileKind, CompileMode, CompileTarget, CrateType, -}; +use crate::core::compiler::{BuildRunner, CompileKind, CompileMode, CompileTarget, CrateType}; use crate::core::{Dependency, Package, Target, TargetKind, Workspace}; use crate::util::context::{GlobalContext, StringList, TargetConfig}; use crate::util::interning::InternedString; @@ -22,6 +20,7 @@ use serde::{Deserialize, Serialize}; use std::cell::RefCell; use std::collections::hash_map::{Entry, HashMap}; use std::path::{Path, PathBuf}; +use std::rc::Rc; use std::str::{self, FromStr}; /// Information about the platform target gleaned from querying rustc. 
@@ -52,9 +51,9 @@ pub struct TargetInfo { /// target libraries. pub sysroot_target_libdir: PathBuf, /// Extra flags to pass to `rustc`, see [`extra_args`]. - pub rustflags: Vec, + pub rustflags: Rc<[String]>, /// Extra flags to pass to `rustdoc`, see [`extra_args`]. - pub rustdocflags: Vec, + pub rustdocflags: Rc<[String]>, } /// Kind of each file generated by a Unit, part of `FileType`. @@ -147,6 +146,7 @@ impl TargetInfo { /// invocation is cached by [`Rustc::cached_output`]. /// /// Search `Tricky` to learn why querying `rustc` several times is needed. + #[tracing::instrument(skip_all)] pub fn new( gctx: &GlobalContext, requested_kinds: &[CompileKind], @@ -299,7 +299,7 @@ impl TargetInfo { crate_types: RefCell::new(map), sysroot, sysroot_target_libdir, - rustflags, + rustflags: rustflags.into(), rustdocflags: extra_args( gctx, requested_kinds, @@ -307,7 +307,8 @@ impl TargetInfo { Some(&cfg), kind, Flags::Rustdoc, - )?, + )? + .into(), cfg, support_split_debuginfo, }); @@ -388,7 +389,10 @@ impl TargetInfo { crate_type: Some(crate_type.clone()), should_replace_hyphens: true, }); - } else if target_triple.ends_with("windows-gnu") && suffix == ".dll" { + } else if suffix == ".dll" + && (target_triple.ends_with("windows-gnu") + || target_triple.ends_with("windows-gnullvm")) + { // See https://cygwin.com/cygwin-ug-net/dll.html for more // information about GNU import libraries. // LD can link DLL directly, but LLD requires the import library. @@ -853,7 +857,10 @@ pub struct RustcTargetData<'gctx> { /// Build information for the "host", which is information about when /// `rustc` is invoked without a `--target` flag. This is used for - /// procedural macros, build scripts, etc. + /// selecting a linker, and applying link overrides. + /// + /// The configuration read into this depends on whether or not + /// `target-applies-to-host=true`. host_config: TargetConfig, /// Information about the host platform. 
host_info: TargetInfo, @@ -865,6 +872,7 @@ pub struct RustcTargetData<'gctx> { } impl<'gctx> RustcTargetData<'gctx> { + #[tracing::instrument(skip_all)] pub fn new( ws: &Workspace<'gctx>, requested_kinds: &[CompileKind], @@ -874,7 +882,10 @@ impl<'gctx> RustcTargetData<'gctx> { let mut target_config = HashMap::new(); let mut target_info = HashMap::new(); let target_applies_to_host = gctx.target_applies_to_host()?; + let host_target = CompileTarget::new(&rustc.host)?; let host_info = TargetInfo::new(gctx, requested_kinds, &rustc, CompileKind::Host)?; + + // This config is used for link overrides and choosing a linker. let host_config = if target_applies_to_host { gctx.target_cfg_triple(&rustc.host)? } else { @@ -887,9 +898,21 @@ impl<'gctx> RustcTargetData<'gctx> { // needs access to the target config data, create a copy so that it // can be found. See `rebuild_unit_graph_shared` for why this is done. if requested_kinds.iter().any(CompileKind::is_host) { - let ct = CompileTarget::new(&rustc.host)?; - target_info.insert(ct, host_info.clone()); - target_config.insert(ct, gctx.target_cfg_triple(&rustc.host)?); + target_config.insert(host_target, gctx.target_cfg_triple(&rustc.host)?); + + // If target_applies_to_host is true, the host_info is the target info, + // otherwise we need to build target info for the target. + if target_applies_to_host { + target_info.insert(host_target, host_info.clone()); + } else { + let host_target_info = TargetInfo::new( + gctx, + requested_kinds, + &rustc, + CompileKind::Target(host_target), + )?; + target_info.insert(host_target, host_target_info); + } }; let mut res = RustcTargetData { @@ -1003,14 +1026,6 @@ impl<'gctx> RustcTargetData<'gctx> { CompileKind::Target(s) => &self.target_config[&s], } } - - /// If a build script is overridden, this returns the `BuildOutput` to use. - /// - /// `lib_name` is the `links` library name and `kind` is whether it is for - /// Host or Target. 
- pub fn script_override(&self, lib_name: &str, kind: CompileKind) -> Option<&BuildOutput> { - self.target_config(kind).links_overrides.get(lib_name) - } } /// Structure used to deal with Rustdoc fingerprinting @@ -1054,7 +1069,7 @@ impl RustDocFingerprint { let Ok(rustdoc_data) = paths::read(&fingerprint_path) else { // If the fingerprint does not exist, do not clear out the doc // directories. Otherwise this ran into problems where projects - // like rustbuild were creating the doc directory before running + // like bootstrap were creating the doc directory before running // `cargo doc` in a way that deleting it would break it. return write_fingerprint(); }; diff --git a/src/cargo/core/compiler/build_runner/compilation_files.rs b/src/cargo/core/compiler/build_runner/compilation_files.rs index 27c555a2694..667cf0379e0 100644 --- a/src/cargo/core/compiler/build_runner/compilation_files.rs +++ b/src/cargo/core/compiler/build_runner/compilation_files.rs @@ -121,7 +121,7 @@ pub struct OutputFile { /// If it should be linked into `target`, and what it should be called /// (e.g., without metadata). pub hardlink: Option, - /// If `--out-dir` is specified, the absolute path to the exported file. + /// If `--artifact-dir` is specified, the absolute path to the exported file. pub export_path: Option, /// Type of the file (library / debug symbol / else). pub flavor: FileFlavor, @@ -213,7 +213,7 @@ impl<'a, 'gctx: 'a> CompilationFiles<'a, 'gctx> { } } - /// Additional export directory from `--out-dir`. + /// Additional export directory from `--artifact-dir`. pub fn export_dir(&self) -> Option { self.export_dir.clone() } @@ -620,7 +620,7 @@ fn compute_metadata( unit.target.name().hash(&mut hasher); unit.target.kind().hash(&mut hasher); - hash_rustc_version(bcx, &mut hasher); + hash_rustc_version(bcx, &mut hasher, unit); if build_runner.bcx.ws.is_member(&unit.pkg) { // This is primarily here for clippy. 
This ensures that the clippy @@ -649,6 +649,26 @@ fn compute_metadata( // with user dependencies. unit.is_std.hash(&mut hasher); + // While we don't hash RUSTFLAGS because it may contain absolute paths that + // hurts reproducibility, we track whether a unit's RUSTFLAGS is from host + // config, so that we can generate a different metadata hash for runtime + // and compile-time units. + // + // HACK: This is a temporary hack for fixing rust-lang/cargo#14253 + // Need to find a long-term solution to replace this fragile workaround. + // See https://github.com/rust-lang/cargo/pull/14432#discussion_r1725065350 + if unit.kind.is_host() && !bcx.gctx.target_applies_to_host().unwrap_or_default() { + let host_info = bcx.target_data.info(CompileKind::Host); + let target_configs_are_different = unit.rustflags != host_info.rustflags + || unit.rustdocflags != host_info.rustdocflags + || bcx + .target_data + .target_config(CompileKind::Host) + .links_overrides + != unit.links_overrides; + target_configs_are_different.hash(&mut hasher); + } + MetaInfo { meta_hash: Metadata(hasher.finish()), use_extra_filename: should_use_metadata(bcx, unit), @@ -656,12 +676,19 @@ fn compute_metadata( } /// Hash the version of rustc being used during the build process. -fn hash_rustc_version(bcx: &BuildContext<'_, '_>, hasher: &mut StableHasher) { +fn hash_rustc_version(bcx: &BuildContext<'_, '_>, hasher: &mut StableHasher, unit: &Unit) { let vers = &bcx.rustc().version; if vers.pre.is_empty() || bcx.gctx.cli_unstable().separate_nightlies { // For stable, keep the artifacts separate. This helps if someone is - // testing multiple versions, to avoid recompiles. - bcx.rustc().verbose_version.hash(hasher); + // testing multiple versions, to avoid recompiles. Note though that for + // cross-compiled builds the `host:` line of `verbose_version` is + // omitted since rustc should produce the same output for each target + // regardless of the host. 
+ for line in bcx.rustc().verbose_version.lines() { + if unit.kind.is_host() || !line.starts_with("host: ") { + line.hash(hasher); + } + } return; } // On "nightly"/"beta"/"dev"/etc, keep each "channel" separate. Don't hash @@ -674,7 +701,9 @@ fn hash_rustc_version(bcx: &BuildContext<'_, '_>, hasher: &mut StableHasher) { // Keep "host" since some people switch hosts to implicitly change // targets, (like gnu vs musl or gnu vs msvc). In the future, we may want // to consider hashing `unit.kind.short_name()` instead. - bcx.rustc().host.hash(hasher); + if unit.kind.is_host() { + bcx.rustc().host.hash(hasher); + } // None of the other lines are important. Currently they are: // binary: rustc <-- or "rustdoc" // commit-hash: 38114ff16e7856f98b2b4be7ab4cd29b38bed59a diff --git a/src/cargo/core/compiler/build_runner/mod.rs b/src/cargo/core/compiler/build_runner/mod.rs index ac99b734c8d..32651e72a8c 100644 --- a/src/cargo/core/compiler/build_runner/mod.rs +++ b/src/cargo/core/compiler/build_runner/mod.rs @@ -16,7 +16,7 @@ use jobserver::Client; use super::build_plan::BuildPlan; use super::custom_build::{self, BuildDeps, BuildScriptOutputs, BuildScripts}; -use super::fingerprint::Fingerprint; +use super::fingerprint::{Checksum, Fingerprint}; use super::job_queue::JobQueue; use super::layout::Layout; use super::lto::Lto; @@ -50,6 +50,8 @@ pub struct BuildRunner<'a, 'gctx> { pub fingerprints: HashMap>, /// Cache of file mtimes to reduce filesystem hits. pub mtime_cache: HashMap, + /// Cache of file checksums to reduce filesystem reads. + pub checksum_cache: HashMap, /// A set used to track which units have been compiled. /// A unit may appear in the job graph multiple times as a dependency of /// multiple packages, but it only needs to run once. 
@@ -100,8 +102,8 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> { let jobserver = match bcx.gctx.jobserver_from_env() { Some(c) => c.clone(), None => { - let client = Client::new(bcx.jobs() as usize) - .with_context(|| "failed to create jobserver")?; + let client = + Client::new(bcx.jobs() as usize).context("failed to create jobserver")?; client.acquire_raw()?; client } @@ -113,6 +115,7 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> { build_script_outputs: Arc::new(Mutex::new(BuildScriptOutputs::default())), fingerprints: HashMap::new(), mtime_cache: HashMap::new(), + checksum_cache: HashMap::new(), compiled: HashSet::new(), build_scripts: HashMap::new(), build_explicit_deps: HashMap::new(), @@ -126,6 +129,27 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> { }) } + /// Dry-run the compilation without actually running it. + /// + /// This is expected to collect information like the location of output artifacts. + /// Please keep in sync with non-compilation part in [`BuildRunner::compile`]. + pub fn dry_run(mut self) -> CargoResult> { + let _lock = self + .bcx + .gctx + .acquire_package_cache_lock(CacheLockMode::Shared)?; + self.lto = super::lto::generate(self.bcx)?; + self.prepare_units()?; + self.prepare()?; + self.check_collisions()?; + + for unit in &self.bcx.roots { + self.collect_tests_and_executables(unit)?; + } + + Ok(self.compilation) + } + /// Starts compilation, waits for it to finish, and returns information /// about the result of compilation. /// @@ -214,31 +238,7 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> { // Collect the result of the build into `self.compilation`. for unit in &self.bcx.roots { - // Collect tests and executables. 
- for output in self.outputs(unit)?.iter() { - if output.flavor == FileFlavor::DebugInfo || output.flavor == FileFlavor::Auxiliary - { - continue; - } - - let bindst = output.bin_dst(); - - if unit.mode == CompileMode::Test { - self.compilation - .tests - .push(self.unit_output(unit, &output.path)); - } else if unit.target.is_executable() { - self.compilation - .binaries - .push(self.unit_output(unit, bindst)); - } else if unit.target.is_cdylib() - && !self.compilation.cdylibs.iter().any(|uo| uo.unit == *unit) - { - self.compilation - .cdylibs - .push(self.unit_output(unit, bindst)); - } - } + self.collect_tests_and_executables(unit)?; // Collect information for `rustdoc --test`. if unit.mode.is_doc_test() { @@ -246,7 +246,7 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> { let mut args = compiler::extern_args(&self, unit, &mut unstable_opts)?; args.extend(compiler::lto_args(&self, unit)); args.extend(compiler::features_args(unit)); - args.extend(compiler::check_cfg_args(&self, unit)); + args.extend(compiler::check_cfg_args(unit)); let script_meta = self.find_build_script_metadata(unit); if let Some(meta) = script_meta { @@ -256,12 +256,9 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> { args.push(cfg.into()); } - if !output.check_cfgs.is_empty() { - args.push("-Zunstable-options".into()); - for check_cfg in &output.check_cfgs { - args.push("--check-cfg".into()); - args.push(check_cfg.into()); - } + for check_cfg in &output.check_cfgs { + args.push("--check-cfg".into()); + args.push(check_cfg.into()); } for (lt, arg) in &output.linker_args { @@ -272,7 +269,7 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> { } } } - args.extend(self.bcx.rustdocflags_args(unit).iter().map(Into::into)); + args.extend(unit.rustdocflags.iter().map(Into::into)); use super::MessageFormat; let format = match self.bcx.build_config.message_format { @@ -310,6 +307,33 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> { Ok(self.compilation) } + fn collect_tests_and_executables(&mut self, unit: &Unit) -> CargoResult<()> 
{ + for output in self.outputs(unit)?.iter() { + if output.flavor == FileFlavor::DebugInfo || output.flavor == FileFlavor::Auxiliary { + continue; + } + + let bindst = output.bin_dst(); + + if unit.mode == CompileMode::Test { + self.compilation + .tests + .push(self.unit_output(unit, &output.path)); + } else if unit.target.is_executable() { + self.compilation + .binaries + .push(self.unit_output(unit, bindst)); + } else if unit.target.is_cdylib() + && !self.compilation.cdylibs.iter().any(|uo| uo.unit == *unit) + { + self.compilation + .cdylibs + .push(self.unit_output(unit, bindst)); + } + } + Ok(()) + } + /// Returns the executable for the specified unit (if any). pub fn get_executable(&mut self, unit: &Unit) -> CargoResult> { let is_binary = unit.target.is_executable(); @@ -357,11 +381,11 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> { .unwrap() .host .prepare() - .with_context(|| "couldn't prepare build directories")?; + .context("couldn't prepare build directories")?; for target in self.files.as_mut().unwrap().target.values_mut() { target .prepare() - .with_context(|| "couldn't prepare build directories")?; + .context("couldn't prepare build directories")?; } let files = self.files.as_ref().unwrap(); @@ -577,7 +601,7 @@ impl<'a, 'gctx> BuildRunner<'a, 'gctx> { if let Some(ref export_path) = output.export_path { if let Some(other_unit) = output_collisions.insert(export_path.clone(), unit) { self.bcx.gctx.shell().warn(format!( - "`--out-dir` filename collision.\n\ + "`--artifact-dir` filename collision.\n\ {}\ The exported filenames should be unique.\n\ {}", diff --git a/src/cargo/core/compiler/compilation.rs b/src/cargo/core/compiler/compilation.rs index 405b17b9884..5ecf77d4c6a 100644 --- a/src/cargo/core/compiler/compilation.rs +++ b/src/cargo/core/compiler/compilation.rs @@ -323,7 +323,11 @@ impl<'gctx> Compilation<'gctx> { let dylib_path = paths::dylib_path(); let dylib_path_is_empty = dylib_path.is_empty(); - search_path.extend(dylib_path.into_iter()); + if 
dylib_path.starts_with(&search_path) { + search_path = dylib_path; + } else { + search_path.extend(dylib_path.into_iter()); + } if cfg!(target_os = "macos") && dylib_path_is_empty { // These are the defaults when DYLD_FALLBACK_LIBRARY_PATH isn't // set or set to an empty string. Since Cargo is explicitly setting @@ -356,6 +360,7 @@ impl<'gctx> Compilation<'gctx> { // in BuildContext::target_metadata() let rust_version = pkg.rust_version().as_ref().map(ToString::to_string); cmd.env("CARGO_MANIFEST_DIR", pkg.root()) + .env("CARGO_MANIFEST_PATH", pkg.manifest_path()) .env("CARGO_PKG_VERSION_MAJOR", &pkg.version().major.to_string()) .env("CARGO_PKG_VERSION_MINOR", &pkg.version().minor.to_string()) .env("CARGO_PKG_VERSION_PATCH", &pkg.version().patch.to_string()) diff --git a/src/cargo/core/compiler/custom_build.rs b/src/cargo/core/compiler/custom_build.rs index 829177f5fbb..f36e741c65b 100644 --- a/src/cargo/core/compiler/custom_build.rs +++ b/src/cargo/core/compiler/custom_build.rs @@ -61,7 +61,7 @@ const OLD_CARGO_WARNING_SYNTAX: &str = "cargo:warning="; /// [the doc]: https://doc.rust-lang.org/nightly/cargo/reference/build-scripts.html#cargo-warning const NEW_CARGO_WARNING_SYNTAX: &str = "cargo::warning="; /// Contains the parsed output of a custom build script. -#[derive(Clone, Debug, Hash, Default)] +#[derive(Clone, Debug, Hash, Default, PartialEq, Eq, PartialOrd, Ord)] pub struct BuildOutput { /// Paths to pass to rustc with the `-L` flag. pub library_paths: Vec, @@ -160,7 +160,7 @@ pub struct BuildDeps { /// See the [build script documentation][1] for more. /// /// [1]: https://doc.rust-lang.org/nightly/cargo/reference/build-scripts.html#cargorustc-link-argflag -#[derive(Clone, Hash, Debug, PartialEq, Eq)] +#[derive(Clone, Hash, Debug, PartialEq, Eq, PartialOrd, Ord)] pub enum LinkArgTarget { /// Represents `cargo::rustc-link-arg=FLAG`. 
All, @@ -279,6 +279,7 @@ fn build_work(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResul let debug = unit.profile.debuginfo.is_turned_on(); cmd.env("OUT_DIR", &script_out_dir) .env("CARGO_MANIFEST_DIR", unit.pkg.root()) + .env("CARGO_MANIFEST_PATH", unit.pkg.manifest_path()) .env("NUM_JOBS", &bcx.jobs().to_string()) .env("TARGET", bcx.target_data.short_name(&unit.kind)) .env("DEBUG", debug.to_string()) @@ -352,10 +353,7 @@ fn build_work(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResul cmd.env("RUSTC_WORKSPACE_WRAPPER", wrapper); } } - cmd.env( - "CARGO_ENCODED_RUSTFLAGS", - bcx.rustflags_args(unit).join("\x1f"), - ); + cmd.env("CARGO_ENCODED_RUSTFLAGS", unit.rustflags.join("\x1f")); cmd.env_remove("RUSTFLAGS"); if build_runner.bcx.ws.gctx().extra_verbose() { @@ -408,7 +406,6 @@ fn build_work(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResul paths::create_dir_all(&script_out_dir)?; let nightly_features_allowed = build_runner.bcx.gctx.nightly_features_allowed; - let extra_check_cfg = build_runner.bcx.gctx.cli_unstable().check_cfg; let targets: Vec = unit.pkg.targets().to_vec(); let msrv = unit.pkg.rust_version().cloned(); // Need a separate copy for the fresh closure. @@ -435,7 +432,7 @@ fn build_work(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResul // If we have an old build directory, then just move it into place, // otherwise create it! paths::create_dir_all(&script_out_dir) - .with_context(|| "failed to create script output directory for build command")?; + .context("failed to create script output directory for build command")?; // For all our native lib dependencies, pick up their metadata to pass // along to this custom build command. 
We're also careful to augment our @@ -556,7 +553,6 @@ fn build_work(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResul &pkg_descr, &script_out_dir, &script_out_dir, - extra_check_cfg, nightly_features_allowed, &targets, &msrv, @@ -585,7 +581,6 @@ fn build_work(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResul &pkg_descr, &prev_script_out_dir, &script_out_dir, - extra_check_cfg, nightly_features_allowed, &targets_fresh, &msrv_fresh, @@ -642,7 +637,6 @@ impl BuildOutput { pkg_descr: &str, script_out_dir_when_generated: &Path, script_out_dir: &Path, - extra_check_cfg: bool, nightly_features_allowed: bool, targets: &[Target], msrv: &Option, @@ -654,7 +648,6 @@ impl BuildOutput { pkg_descr, script_out_dir_when_generated, script_out_dir, - extra_check_cfg, nightly_features_allowed, targets, msrv, @@ -665,9 +658,6 @@ impl BuildOutput { /// /// * `pkg_descr` --- for error messages /// * `library_name` --- for determining if `RUSTC_BOOTSTRAP` should be allowed - /// * `extra_check_cfg` --- for unstable feature [`-Zcheck-cfg`] - /// - /// [`-Zcheck-cfg`]: https://doc.rust-lang.org/cargo/reference/unstable.html#check-cfg pub fn parse( input: &[u8], // Takes String instead of InternedString so passing `unit.pkg.name()` will give a compile error. 
@@ -675,7 +665,6 @@ impl BuildOutput { pkg_descr: &str, script_out_dir_when_generated: &Path, script_out_dir: &Path, - extra_check_cfg: bool, nightly_features_allowed: bool, targets: &[Target], msrv: &Option, @@ -720,16 +709,35 @@ impl BuildOutput { const DOCS_LINK_SUGGESTION: &str = "See https://doc.rust-lang.org/cargo/reference/build-scripts.html#outputs-of-the-build-script \ for more information about build script outputs."; + fn has_reserved_prefix(flag: &str) -> bool { + RESERVED_PREFIXES + .iter() + .any(|reserved_prefix| flag.starts_with(reserved_prefix)) + } + fn check_minimum_supported_rust_version_for_new_syntax( pkg_descr: &str, msrv: &Option, + flag: &str, ) -> CargoResult<()> { if let Some(msrv) = msrv { let new_syntax_added_in = RustVersion::from_str("1.77.0")?; if !new_syntax_added_in.is_compatible_with(msrv.as_partial()) { + let old_syntax_suggestion = if has_reserved_prefix(flag) { + format!( + "Switch to the old `cargo:{flag}` syntax (note the single colon).\n" + ) + } else if flag.starts_with("metadata=") { + let old_format_flag = flag.strip_prefix("metadata=").unwrap(); + format!("Switch to the old `cargo:{old_format_flag}` syntax instead of `cargo::{flag}` (note the single colon).\n") + } else { + String::new() + }; + bail!( "the `cargo::` syntax for build script output instructions was added in \ Rust 1.77.0, but the minimum supported Rust version of `{pkg_descr}` is {msrv}.\n\ + {old_syntax_suggestion}\ {DOCS_LINK_SUGGESTION}" ); } @@ -791,16 +799,13 @@ impl BuildOutput { }; let mut old_syntax = false; let (key, value) = if let Some(data) = line.strip_prefix("cargo::") { - check_minimum_supported_rust_version_for_new_syntax(pkg_descr, msrv)?; + check_minimum_supported_rust_version_for_new_syntax(pkg_descr, msrv, data)?; // For instance, `cargo::rustc-flags=foo` or `cargo::metadata=foo=bar`. parse_directive(whence.as_str(), line, data, old_syntax)? 
} else if let Some(data) = line.strip_prefix("cargo:") { old_syntax = true; // For instance, `cargo:rustc-flags=foo`. - if RESERVED_PREFIXES - .iter() - .any(|prefix| data.starts_with(prefix)) - { + if has_reserved_prefix(data) { parse_directive(whence.as_str(), line, data, old_syntax)? } else { // For instance, `cargo:foo=bar`. @@ -906,14 +911,7 @@ impl BuildOutput { linker_args.push((LinkArgTarget::All, value)); } "rustc-cfg" => cfgs.push(value.to_string()), - "rustc-check-cfg" => { - if extra_check_cfg { - check_cfgs.push(value.to_string()); - } else { - // silently ignoring the instruction to try to - // minimise MSRV annoyance when stabilizing -Zcheck-cfg - } - } + "rustc-check-cfg" => check_cfgs.push(value.to_string()), "rustc-env" => { let (key, val) = BuildOutput::parse_rustc_env(&value, &whence)?; // Build scripts aren't allowed to set RUSTC_BOOTSTRAP. @@ -1153,11 +1151,7 @@ pub fn build_map(build_runner: &mut BuildRunner<'_, '_>) -> CargoResult<()> { // If there is a build script override, pre-fill the build output. 
if unit.mode.is_run_custom_build() { if let Some(links) = unit.pkg.manifest().links() { - if let Some(output) = build_runner - .bcx - .target_data - .script_override(links, unit.kind) - { + if let Some(output) = unit.links_overrides.get(links) { let metadata = build_runner.get_run_build_script_metadata(unit); build_runner.build_script_outputs.lock().unwrap().insert( unit.pkg.package_id(), @@ -1254,7 +1248,6 @@ fn prev_build_output( &unit.pkg.to_string(), &prev_script_out_dir, &script_out_dir, - build_runner.bcx.gctx.cli_unstable().check_cfg, build_runner.bcx.gctx.nightly_features_allowed, unit.pkg.targets(), &unit.pkg.rust_version().cloned(), diff --git a/src/cargo/core/compiler/fingerprint/dirty_reason.rs b/src/cargo/core/compiler/fingerprint/dirty_reason.rs index cb6548a41a5..2da7d1e6d88 100644 --- a/src/cargo/core/compiler/fingerprint/dirty_reason.rs +++ b/src/cargo/core/compiler/fingerprint/dirty_reason.rs @@ -34,6 +34,9 @@ pub enum DirtyReason { old: String, new: String, }, + ChecksumUseChanged { + old: bool, + }, DepInfoOutputChanged { old: PathBuf, new: PathBuf, @@ -183,6 +186,16 @@ impl DirtyReason { DirtyReason::PrecalculatedComponentsChanged { .. } => { s.dirty_because(unit, "the precalculated components changed") } + DirtyReason::ChecksumUseChanged { old } => { + if *old { + s.dirty_because( + unit, + "the prior compilation used checksum freshness and this one does not", + ) + } else { + s.dirty_because(unit, "checksum freshness requested, prior compilation did not use checksum freshness") + } + } DirtyReason::DepInfoOutputChanged { .. 
} => { s.dirty_because(unit, "the dependency info output changed") } @@ -222,6 +235,20 @@ impl DirtyReason { format_args!("the file `{}` is missing", file.display()), ) } + StaleItem::UnableToReadFile(file) => { + let file = file.strip_prefix(root).unwrap_or(&file); + s.dirty_because( + unit, + format_args!("the file `{}` could not be read", file.display()), + ) + } + StaleItem::FailedToReadMetadata(file) => { + let file = file.strip_prefix(root).unwrap_or(&file); + s.dirty_because( + unit, + format_args!("couldn't read metadata for file `{}`", file.display()), + ) + } StaleItem::ChangedFile { stale, stale_mtime, @@ -235,6 +262,41 @@ impl DirtyReason { format_args!("the file `{}` has changed ({after})", file.display()), ) } + StaleItem::ChangedChecksum { + source, + stored_checksum, + new_checksum, + } => { + let file = source.strip_prefix(root).unwrap_or(&source); + s.dirty_because( + unit, + format_args!( + "the file `{}` has changed (checksum didn't match, {stored_checksum} != {new_checksum})", + file.display(), + ), + ) + } + StaleItem::FileSizeChanged { + path, + old_size, + new_size, + } => { + let file = path.strip_prefix(root).unwrap_or(&path); + s.dirty_because( + unit, + format_args!( + "file size changed ({old_size} != {new_size}) for `{}`", + file.display() + ), + ) + } + StaleItem::MissingChecksum(path) => { + let file = path.strip_prefix(root).unwrap_or(&path); + s.dirty_because( + unit, + format_args!("the checksum for file `{}` is missing", file.display()), + ) + } StaleItem::ChangedEnv { var, .. } => s.dirty_because( unit, format_args!("the environment variable {var} changed"), diff --git a/src/cargo/core/compiler/fingerprint/mod.rs b/src/cargo/core/compiler/fingerprint/mod.rs index f3389b49f58..e6fff90932a 100644 --- a/src/cargo/core/compiler/fingerprint/mod.rs +++ b/src/cargo/core/compiler/fingerprint/mod.rs @@ -33,6 +33,12 @@ //! details. If any input files are missing, or are newer than the //! dep-info, then the unit is dirty. //! +//! 
- Alternatively if you're using the unstable feature `checksum-freshness` +//! mtimes are ignored entirely in favor of comparing first the file size, and +//! then the checksum with a known prior value emitted by rustc. Only nightly +//! rustc will emit the needed metadata at the time of writing. This is dependent +//! on the unstable feature `-Z checksum-hash-algorithm`. +//! //! Note: Fingerprinting is not a perfect solution. Filesystem mtime tracking //! is notoriously imprecise and problematic. Only a small part of the //! environment is captured. This is a balance of performance, simplicity, and @@ -80,6 +86,7 @@ //! config settings[^5] | βœ“ | //! is_std | | βœ“ //! `[lints]` table[^6] | βœ“ | +//! `[lints.rust.unexpected_cfgs.check-cfg]` | βœ“ | //! //! [^1]: Build script and bin dependencies are not included. //! @@ -357,15 +364,17 @@ mod dirty_reason; use std::collections::hash_map::{Entry, HashMap}; use std::env; +use std::fmt::{self, Display}; +use std::fs::{self, File}; use std::hash::{self, Hash, Hasher}; -use std::io; +use std::io::{self, Read}; use std::path::{Path, PathBuf}; -use std::str; +use std::str::{self, from_utf8, FromStr}; use std::sync::{Arc, Mutex}; use std::time::SystemTime; use anyhow::{bail, format_err, Context as _}; -use cargo_util::{paths, ProcessBuilder}; +use cargo_util::{paths, ProcessBuilder, Sha256}; use filetime::FileTime; use serde::de; use serde::ser; @@ -724,7 +733,10 @@ enum LocalFingerprint { /// The `dep_info` file, when present, also lists a number of other files /// for us to look at. If any of those files are newer than this file then /// we need to recompile. - CheckDepInfo { dep_info: PathBuf }, + /// + /// If the `checksum` bool is true then the dep_info file is expected to + /// contain file checksums instead of file mtimes. + CheckDepInfo { dep_info: PathBuf, checksum: bool }, /// This represents a nonempty set of `rerun-if-changed` annotations printed /// out by a build script. 
The `output` file is a relative file anchored at @@ -751,12 +763,25 @@ enum LocalFingerprint { #[derive(Clone, Debug)] pub enum StaleItem { MissingFile(PathBuf), + UnableToReadFile(PathBuf), + FailedToReadMetadata(PathBuf), + FileSizeChanged { + path: PathBuf, + old_size: u64, + new_size: u64, + }, ChangedFile { reference: PathBuf, reference_mtime: FileTime, stale: PathBuf, stale_mtime: FileTime, }, + ChangedChecksum { + source: PathBuf, + stored_checksum: Checksum, + new_checksum: Checksum, + }, + MissingChecksum(PathBuf), ChangedEnv { var: String, previous: Option, @@ -792,6 +817,7 @@ impl LocalFingerprint { fn find_stale_item( &self, mtime_cache: &mut HashMap, + checksum_cache: &mut HashMap, pkg_root: &Path, target_root: &Path, cargo_exe: &Path, @@ -804,7 +830,7 @@ impl LocalFingerprint { // matches, and for each file we see if any of them are newer than // the `dep_info` file itself whose mtime represents the start of // rustc. - LocalFingerprint::CheckDepInfo { dep_info } => { + LocalFingerprint::CheckDepInfo { dep_info, checksum } => { let dep_info = target_root.join(dep_info); let Some(info) = parse_dep_info(pkg_root, target_root, &dep_info)? else { return Ok(Some(StaleItem::MissingFile(dep_info))); @@ -834,15 +860,33 @@ impl LocalFingerprint { current, })); } - Ok(find_stale_file(mtime_cache, &dep_info, info.files.iter())) + if *checksum { + Ok(find_stale_file( + mtime_cache, + checksum_cache, + &dep_info, + info.files.iter().map(|(file, checksum)| (file, *checksum)), + *checksum, + )) + } else { + Ok(find_stale_file( + mtime_cache, + checksum_cache, + &dep_info, + info.files.into_keys().map(|p| (p, None)), + *checksum, + )) + } } // We need to verify that no paths listed in `paths` are newer than // the `output` path itself, or the last time the build script ran. 
LocalFingerprint::RerunIfChanged { output, paths } => Ok(find_stale_file( mtime_cache, + checksum_cache, &target_root.join(output), - paths.iter().map(|p| pkg_root.join(p)), + paths.iter().map(|p| (pkg_root.join(p), None)), + false, )), // These have no dependencies on the filesystem, and their values @@ -964,8 +1008,14 @@ impl Fingerprint { } } ( - LocalFingerprint::CheckDepInfo { dep_info: adep }, - LocalFingerprint::CheckDepInfo { dep_info: bdep }, + LocalFingerprint::CheckDepInfo { + dep_info: adep, + checksum: checksum_a, + }, + LocalFingerprint::CheckDepInfo { + dep_info: bdep, + checksum: checksum_b, + }, ) => { if adep != bdep { return DirtyReason::DepInfoOutputChanged { @@ -973,6 +1023,9 @@ impl Fingerprint { new: adep.clone(), }; } + if checksum_a != checksum_b { + return DirtyReason::ChecksumUseChanged { old: *checksum_b }; + } } ( LocalFingerprint::RerunIfChanged { @@ -1076,6 +1129,7 @@ impl Fingerprint { fn check_filesystem( &mut self, mtime_cache: &mut HashMap, + checksum_cache: &mut HashMap, pkg_root: &Path, target_root: &Path, cargo_exe: &Path, @@ -1180,9 +1234,14 @@ impl Fingerprint { // files for this package itself. If we do find something log a helpful // message and bail out so we stay stale. for local in self.local.get_mut().unwrap().iter() { - if let Some(item) = - local.find_stale_item(mtime_cache, pkg_root, target_root, cargo_exe, gctx)? - { + if let Some(item) = local.find_stale_item( + mtime_cache, + checksum_cache, + pkg_root, + target_root, + cargo_exe, + gctx, + )? 
{ item.log(); self.fs_status = FsStatus::StaleItem(item); return Ok(()); @@ -1292,6 +1351,12 @@ impl StaleItem { StaleItem::MissingFile(path) => { info!("stale: missing {:?}", path); } + StaleItem::UnableToReadFile(path) => { + info!("stale: unable to read {:?}", path); + } + StaleItem::FailedToReadMetadata(path) => { + info!("stale: couldn't read metadata {:?}", path); + } StaleItem::ChangedFile { reference, reference_mtime, @@ -1302,6 +1367,27 @@ impl StaleItem { info!(" (vs) {:?}", reference); info!(" {:?} < {:?}", reference_mtime, stale_mtime); } + StaleItem::FileSizeChanged { + path, + new_size, + old_size, + } => { + info!("stale: changed {:?}", path); + info!("prior file size {old_size}"); + info!(" new file size {new_size}"); + } + StaleItem::ChangedChecksum { + source, + stored_checksum, + new_checksum, + } => { + info!("stale: changed {:?}", source); + info!("prior checksum {stored_checksum}"); + info!(" new checksum {new_checksum}"); + } + StaleItem::MissingChecksum(path) => { + info!("stale: no prior checksum {:?}", path); + } StaleItem::ChangedEnv { var, previous, @@ -1346,6 +1432,7 @@ fn calculate(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> CargoResult let cargo_exe = build_runner.bcx.gctx.cargo_exe()?; fingerprint.check_filesystem( &mut build_runner.mtime_cache, + &mut build_runner.checksum_cache, unit.pkg.root(), &target_root, cargo_exe, @@ -1398,7 +1485,10 @@ fn calculate_normal( } else { let dep_info = dep_info_loc(build_runner, unit); let dep_info = dep_info.strip_prefix(&target_root).unwrap().to_path_buf(); - vec![LocalFingerprint::CheckDepInfo { dep_info }] + vec![LocalFingerprint::CheckDepInfo { + dep_info, + checksum: build_runner.bcx.gctx.cli_unstable().checksum_freshness, + }] }; // Figure out what the outputs of our unit is, and we'll be storing them @@ -1414,9 +1504,9 @@ fn calculate_normal( // hashed to take up less space on disk as we just need to know when things // change. 
let extra_flags = if unit.mode.is_doc() || unit.mode.is_doc_scrape() { - build_runner.bcx.rustdocflags_args(unit) + &unit.rustdocflags } else { - build_runner.bcx.rustflags_args(unit) + &unit.rustflags } .to_vec(); @@ -1454,14 +1544,7 @@ fn calculate_normal( // actually affect the output artifact so there's no need to hash it. path: util::hash_u64(path_args(build_runner.bcx.ws, unit).0), features: format!("{:?}", unit.features), - // Note we curently only populate `declared_features` when `-Zcheck-cfg` - // is passed since it's the only user-facing toggle that will make this - // fingerprint relevant. - declared_features: if build_runner.bcx.gctx.cli_unstable().check_cfg { - format!("{declared_features:?}") - } else { - "".to_string() - }, + declared_features: format!("{declared_features:?}"), deps, local: Mutex::new(local), memoized_hash: Mutex::new(None), @@ -1496,7 +1579,7 @@ fn calculate_run_custom_build( An I/O error happened. Please make sure you can access the file. By default, if your project contains a build script, cargo scans all files in -it to determine whether a rebuild is needed. If you don't expect to access the +it to determine whether a rebuild is needed. If you don't expect to access the file, specify `rerun-if-changed` in your build script. See https://doc.rust-lang.org/cargo/reference/build-scripts.html#rerun-if-changed for more information."; pkg_fingerprint(build_runner.bcx, &unit.pkg).map_err(|err| { @@ -1526,7 +1609,7 @@ See https://doc.rust-lang.org/cargo/reference/build-scripts.html#rerun-if-change .collect::>>()? }; - let rustflags = build_runner.bcx.rustflags_args(unit).to_vec(); + let rustflags = unit.rustflags.to_vec(); Ok(Fingerprint { local: Mutex::new(local), @@ -1849,16 +1932,31 @@ pub fn parse_dep_info( }; let mut ret = RustcDepInfo::default(); ret.env = info.env; - ret.files.extend(info.files.into_iter().map(|(ty, path)| { - match ty { - DepInfoPathType::PackageRootRelative => pkg_root.join(path), - // N.B. 
path might be absolute here in which case the join will have no effect - DepInfoPathType::TargetRootRelative => target_root.join(path), - } - })); + ret.files + .extend(info.files.into_iter().map(|(ty, path, checksum_info)| { + ( + make_absolute_path(ty, pkg_root, target_root, path), + checksum_info.and_then(|(file_len, checksum)| { + Checksum::from_str(&checksum).ok().map(|c| (file_len, c)) + }), + ) + })); Ok(Some(ret)) } +fn make_absolute_path( + ty: DepInfoPathType, + pkg_root: &Path, + target_root: &Path, + path: PathBuf, +) -> PathBuf { + match ty { + DepInfoPathType::PackageRootRelative => pkg_root.join(path), + // N.B. path might be absolute here in which case the join will have no effect + DepInfoPathType::TargetRootRelative => target_root.join(path), + } +} + /// Calculates the fingerprint of a unit thats contains no dep-info files. fn pkg_fingerprint(bcx: &BuildContext<'_, '_>, pkg: &Package) -> CargoResult { let source_id = pkg.package_id().source_id(); @@ -1871,14 +1969,16 @@ fn pkg_fingerprint(bcx: &BuildContext<'_, '_>, pkg: &Package) -> CargoResult( +fn find_stale_file( mtime_cache: &mut HashMap, + checksum_cache: &mut HashMap, reference: &Path, paths: I, + use_checksums: bool, ) -> Option where - I: IntoIterator, - I::Item: AsRef, + I: IntoIterator)>, + P: AsRef, { let Ok(reference_mtime) = paths::mtime(reference) else { return Some(StaleItem::MissingFile(reference.to_path_buf())); @@ -1893,8 +1993,7 @@ where } else { None }; - - for path in paths { + for (path, prior_checksum) in paths { let path = path.as_ref(); // Assuming anything in cargo_home/{git, registry} is immutable @@ -1906,44 +2005,82 @@ where continue; } } - let path_mtime = match mtime_cache.entry(path.to_path_buf()) { - Entry::Occupied(o) => *o.get(), - Entry::Vacant(v) => { - let Ok(mtime) = paths::mtime_recursive(path) else { - return Some(StaleItem::MissingFile(path.to_path_buf())); - }; - *v.insert(mtime) + if use_checksums { + let Some((file_len, prior_checksum)) = 
prior_checksum else { + return Some(StaleItem::MissingChecksum(path.to_path_buf())); + }; + let path_buf = path.to_path_buf(); + + let path_checksum = match checksum_cache.entry(path_buf) { + Entry::Occupied(o) => *o.get(), + Entry::Vacant(v) => { + let Ok(current_file_len) = fs::metadata(&path).map(|m| m.len()) else { + return Some(StaleItem::FailedToReadMetadata(path.to_path_buf())); + }; + let Ok(file) = File::open(path) else { + return Some(StaleItem::MissingFile(path.to_path_buf())); + }; + if current_file_len != file_len { + return Some(StaleItem::FileSizeChanged { + path: path.to_path_buf(), + new_size: current_file_len, + old_size: file_len, + }); + } + let Ok(checksum) = Checksum::compute(prior_checksum.algo, file) else { + return Some(StaleItem::UnableToReadFile(path.to_path_buf())); + }; + *v.insert(checksum) + } + }; + if path_checksum == prior_checksum { + continue; } - }; + return Some(StaleItem::ChangedChecksum { + source: path.to_path_buf(), + stored_checksum: prior_checksum, + new_checksum: path_checksum, + }); + } else { + let path_mtime = match mtime_cache.entry(path.to_path_buf()) { + Entry::Occupied(o) => *o.get(), + Entry::Vacant(v) => { + let Ok(mtime) = paths::mtime_recursive(path) else { + return Some(StaleItem::MissingFile(path.to_path_buf())); + }; + *v.insert(mtime) + } + }; - // TODO: fix #5918. - // Note that equal mtimes should be considered "stale". For filesystems with - // not much timestamp precision like 1s this is would be a conservative approximation - // to handle the case where a file is modified within the same second after - // a build starts. We want to make sure that incremental rebuilds pick that up! - // - // For filesystems with nanosecond precision it's been seen in the wild that - // its "nanosecond precision" isn't really nanosecond-accurate. It turns out that - // kernels may cache the current time so files created at different times actually - // list the same nanosecond precision. 
Some digging on #5919 picked up that the - // kernel caches the current time between timer ticks, which could mean that if - // a file is updated at most 10ms after a build starts then Cargo may not - // pick up the build changes. - // - // All in all, an equality check here would be a conservative assumption that, - // if equal, files were changed just after a previous build finished. - // Unfortunately this became problematic when (in #6484) cargo switch to more accurately - // measuring the start time of builds. - if path_mtime <= reference_mtime { - continue; - } + // TODO: fix #5918. + // Note that equal mtimes should be considered "stale". For filesystems with + // not much timestamp precision like 1s this is would be a conservative approximation + // to handle the case where a file is modified within the same second after + // a build starts. We want to make sure that incremental rebuilds pick that up! + // + // For filesystems with nanosecond precision it's been seen in the wild that + // its "nanosecond precision" isn't really nanosecond-accurate. It turns out that + // kernels may cache the current time so files created at different times actually + // list the same nanosecond precision. Some digging on #5919 picked up that the + // kernel caches the current time between timer ticks, which could mean that if + // a file is updated at most 10ms after a build starts then Cargo may not + // pick up the build changes. + // + // All in all, an equality check here would be a conservative assumption that, + // if equal, files were changed just after a previous build finished. + // Unfortunately this became problematic when (in #6484) cargo switch to more accurately + // measuring the start time of builds. 
+ if path_mtime <= reference_mtime { + continue; + } - return Some(StaleItem::ChangedFile { - reference: reference.to_path_buf(), - reference_mtime, - stale: path.to_path_buf(), - stale_mtime: path_mtime, - }); + return Some(StaleItem::ChangedFile { + reference: reference.to_path_buf(), + reference_mtime, + stale: path.to_path_buf(), + stale_mtime: path_mtime, + }); + } } debug!( @@ -1955,6 +2092,7 @@ where /// Tells the associated path in [`EncodedDepInfo::files`] is relative to package root, /// target root, or absolute. +#[derive(Debug, Eq, PartialEq, Hash, Copy, Clone)] enum DepInfoPathType { /// src/, e.g. src/lib.rs PackageRootRelative, @@ -2034,7 +2172,7 @@ pub fn translate_dep_info( .env .retain(|(key, _)| !rustc_cmd.get_envs().contains_key(key) || key == CARGO_ENV); - for file in depinfo.files { + let serialize_path = |file| { // The path may be absolute or relative, canonical or not. Make sure // it is canonicalized so we are comparing the same kinds of paths. let abs_file = rustc_cwd.join(file); @@ -2047,7 +2185,7 @@ pub fn translate_dep_info( (DepInfoPathType::TargetRootRelative, stripped) } else if let Ok(stripped) = canon_file.strip_prefix(&pkg_root) { if !allow_package { - continue; + return None; } (DepInfoPathType::PackageRootRelative, stripped) } else { @@ -2056,7 +2194,18 @@ pub fn translate_dep_info( // effect. (DepInfoPathType::TargetRootRelative, &*abs_file) }; - on_disk_info.files.push((ty, path.to_owned())); + Some((ty, path.to_owned())) + }; + + for (file, checksum_info) in depinfo.files { + let Some((path_type, path)) = serialize_path(file) else { + continue; + }; + on_disk_info.files.push(( + path_type, + path, + checksum_info.map(|(len, checksum)| (len, checksum.to_string())), + )); } paths::write(cargo_dep_info, on_disk_info.serialize()?)?; Ok(()) @@ -2066,7 +2215,7 @@ pub fn translate_dep_info( #[derive(Default)] pub struct RustcDepInfo { /// The list of files that the main target in the dep-info file depends on. 
- pub files: Vec, + pub files: HashMap>, /// The list of environment variables we found that the rustc compilation /// depends on. /// @@ -2084,7 +2233,7 @@ pub struct RustcDepInfo { /// Cargo will read it for crates on all future compilations. #[derive(Default)] struct EncodedDepInfo { - files: Vec<(DepInfoPathType, PathBuf)>, + files: Vec<(DepInfoPathType, PathBuf, Option<(u64, String)>)>, env: Vec<(String, Option)>, } @@ -2092,19 +2241,30 @@ impl EncodedDepInfo { fn parse(mut bytes: &[u8]) -> Option { let bytes = &mut bytes; let nfiles = read_usize(bytes)?; - let mut files = Vec::with_capacity(nfiles as usize); + let mut files = Vec::with_capacity(nfiles); for _ in 0..nfiles { let ty = match read_u8(bytes)? { 0 => DepInfoPathType::PackageRootRelative, 1 => DepInfoPathType::TargetRootRelative, _ => return None, }; - let bytes = read_bytes(bytes)?; - files.push((ty, paths::bytes2path(bytes).ok()?)); + let path_bytes = read_bytes(bytes)?; + let path = paths::bytes2path(path_bytes).ok()?; + let has_checksum = read_bool(bytes)?; + let checksum_info = has_checksum + .then(|| { + let file_len = read_u64(bytes); + let checksum_string = read_bytes(bytes) + .map(Vec::from) + .and_then(|v| String::from_utf8(v).ok()); + file_len.zip(checksum_string) + }) + .flatten(); + files.push((ty, path, checksum_info)); } let nenv = read_usize(bytes)?; - let mut env = Vec::with_capacity(nenv as usize); + let mut env = Vec::with_capacity(nenv); for _ in 0..nenv { let key = str::from_utf8(read_bytes(bytes)?).ok()?.to_string(); let val = match read_u8(bytes)? 
{ @@ -2122,6 +2282,16 @@ impl EncodedDepInfo { Some(u32::from_le_bytes(ret.try_into().unwrap()) as usize) } + fn read_u64(bytes: &mut &[u8]) -> Option { + let ret = bytes.get(..8)?; + *bytes = &bytes[8..]; + Some(u64::from_le_bytes(ret.try_into().unwrap())) + } + + fn read_bool(bytes: &mut &[u8]) -> Option { + read_u8(bytes).map(|b| b != 0) + } + fn read_u8(bytes: &mut &[u8]) -> Option { let ret = *bytes.get(0)?; *bytes = &bytes[1..]; @@ -2140,12 +2310,17 @@ impl EncodedDepInfo { let mut ret = Vec::new(); let dst = &mut ret; write_usize(dst, self.files.len()); - for (ty, file) in self.files.iter() { + for (ty, file, checksum_info) in self.files.iter() { match ty { DepInfoPathType::PackageRootRelative => dst.push(0), DepInfoPathType::TargetRootRelative => dst.push(1), } write_bytes(dst, paths::path2bytes(file)?); + write_bool(dst, checksum_info.is_some()); + if let Some((len, checksum)) = checksum_info { + write_u64(dst, *len); + write_bytes(dst, checksum); + } } write_usize(dst, self.env.len()); @@ -2170,6 +2345,14 @@ impl EncodedDepInfo { fn write_usize(dst: &mut Vec, val: usize) { dst.extend(&u32::to_le_bytes(val as u32)); } + + fn write_u64(dst: &mut Vec, val: u64) { + dst.extend(&u64::to_le_bytes(val)); + } + + fn write_bool(dst: &mut Vec, val: bool) { + dst.push(u8::from(val)); + } } } @@ -2206,8 +2389,24 @@ pub fn parse_rustc_dep_info(rustc_dep_info: &Path) -> CargoResult internal("malformed dep-info format, trailing \\".to_string()) })?); } - ret.files.push(file.into()); + ret.files.entry(file.into()).or_default(); } + } else if let Some(rest) = line.strip_prefix("# checksum:") { + let mut parts = rest.splitn(3, ' '); + let Some(checksum) = parts.next().map(Checksum::from_str).transpose()? 
else { + continue; + }; + let Some(Ok(file_len)) = parts + .next() + .and_then(|s| s.strip_prefix("file_len:").map(|s| s.parse::())) + else { + continue; + }; + let Some(path) = parts.next().map(PathBuf::from) else { + continue; + }; + + ret.files.insert(path, Some((file_len, checksum))); } } return Ok(ret); @@ -2234,3 +2433,164 @@ pub fn parse_rustc_dep_info(rustc_dep_info: &Path) -> CargoResult Ok(ret) } } + +/// Some algorithms are here to ensure compatibility with possible rustc outputs. +/// The presence of an algorithm here is not a suggestion that it's fit for use. +#[derive(Copy, Clone, Debug, Eq, PartialEq)] +pub enum ChecksumAlgo { + Sha256, + Blake3, +} + +impl ChecksumAlgo { + fn hash_len(&self) -> usize { + match self { + ChecksumAlgo::Sha256 | ChecksumAlgo::Blake3 => 32, + } + } +} + +impl FromStr for ChecksumAlgo { + type Err = InvalidChecksum; + + fn from_str(s: &str) -> Result { + match s { + "sha256" => Ok(Self::Sha256), + "blake3" => Ok(Self::Blake3), + _ => Err(InvalidChecksum::InvalidChecksumAlgo), + } + } +} + +impl Display for ChecksumAlgo { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(match self { + ChecksumAlgo::Sha256 => "sha256", + ChecksumAlgo::Blake3 => "blake3", + }) + } +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq)] +pub struct Checksum { + algo: ChecksumAlgo, + /// If the algorithm uses fewer than 32 bytes, then the remaining bytes will be zero. + value: [u8; 32], +} + +impl Checksum { + pub fn new(algo: ChecksumAlgo, value: [u8; 32]) -> Self { + Self { algo, value } + } + + pub fn compute(algo: ChecksumAlgo, contents: impl Read) -> Result { + // Buffer size is the recommended amount to fully leverage SIMD instructions on AVX-512 as per + // blake3 documentation. 
+ let mut buf = vec![0; 16 * 1024]; + let mut ret = Self { + algo, + value: [0; 32], + }; + let len = algo.hash_len(); + let value = &mut ret.value[..len]; + + fn digest( + mut hasher: T, + mut update: impl FnMut(&mut T, &[u8]), + finish: impl FnOnce(T, &mut [u8]), + mut contents: impl Read, + buf: &mut [u8], + value: &mut [u8], + ) -> Result<(), io::Error> { + loop { + let bytes_read = contents.read(buf)?; + if bytes_read == 0 { + break; + } + update(&mut hasher, &buf[0..bytes_read]); + } + finish(hasher, value); + Ok(()) + } + + match algo { + ChecksumAlgo::Sha256 => { + digest( + Sha256::new(), + |h, b| { + h.update(b); + }, + |mut h, out| out.copy_from_slice(&h.finish()), + contents, + &mut buf, + value, + )?; + } + ChecksumAlgo::Blake3 => { + digest( + blake3::Hasher::new(), + |h, b| { + h.update(b); + }, + |h, out| out.copy_from_slice(h.finalize().as_bytes()), + contents, + &mut buf, + value, + )?; + } + } + Ok(ret) + } + + pub fn algo(&self) -> ChecksumAlgo { + self.algo + } + + pub fn value(&self) -> &[u8; 32] { + &self.value + } +} + +impl FromStr for Checksum { + type Err = InvalidChecksum; + + fn from_str(s: &str) -> Result { + let mut parts = s.split('='); + let Some(algo) = parts.next().map(ChecksumAlgo::from_str).transpose()? 
else { + return Err(InvalidChecksum::InvalidFormat); + }; + let Some(checksum) = parts.next() else { + return Err(InvalidChecksum::InvalidFormat); + }; + let mut value = [0; 32]; + if hex::decode_to_slice(checksum, &mut value[0..algo.hash_len()]).is_err() { + return Err(InvalidChecksum::InvalidChecksum(algo)); + } + Ok(Self { algo, value }) + } +} + +impl Display for Checksum { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut checksum = [0; 64]; + let hash_len = self.algo.hash_len(); + hex::encode_to_slice(&self.value[0..hash_len], &mut checksum[0..(hash_len * 2)]) + .map_err(|_| fmt::Error)?; + write!( + f, + "{}={}", + self.algo, + from_utf8(&checksum[0..(hash_len * 2)]).unwrap_or_default() + ) + } +} + +#[derive(Debug, thiserror::Error)] +pub enum InvalidChecksum { + #[error("algorithm portion incorrect, expected `sha256`, or `blake3`")] + InvalidChecksumAlgo, + #[error("expected {} hexadecimal digits in checksum portion", .0.hash_len() * 2)] + InvalidChecksum(ChecksumAlgo), + #[error("expected a string with format \"algorithm=hex_checksum\"")] + InvalidFormat, +} diff --git a/src/cargo/core/compiler/future_incompat.rs b/src/cargo/core/compiler/future_incompat.rs index 7486138b77f..abe3de3e2a8 100644 --- a/src/cargo/core/compiler/future_incompat.rs +++ b/src/cargo/core/compiler/future_incompat.rs @@ -211,9 +211,9 @@ impl OnDiskReports { report_file .file() .read_to_string(&mut file_contents) - .with_context(|| "failed to read report")?; + .context("failed to read report")?; let on_disk_reports: OnDiskReports = - serde_json::from_str(&file_contents).with_context(|| "failed to load report")?; + serde_json::from_str(&file_contents).context("failed to load report")?; if on_disk_reports.version != ON_DISK_VERSION { bail!("unable to read reports; reports were saved from a future version of Cargo"); } diff --git a/src/cargo/core/compiler/job_queue/job_state.rs b/src/cargo/core/compiler/job_queue/job_state.rs index 12c04258d3e..b3b2e896007 100644 
--- a/src/cargo/core/compiler/job_queue/job_state.rs +++ b/src/cargo/core/compiler/job_queue/job_state.rs @@ -103,7 +103,7 @@ impl<'a, 'gctx> JobState<'a, 'gctx> { } /// See [`Message::Diagnostic`] and [`Message::WarningCount`]. - pub fn emit_diag(&self, level: String, diag: String, fixable: bool) -> CargoResult<()> { + pub fn emit_diag(&self, level: &str, diag: String, fixable: bool) -> CargoResult<()> { if let Some(dedupe) = self.output { let emitted = dedupe.emit_diag(&diag)?; if level == "warning" { @@ -116,7 +116,7 @@ impl<'a, 'gctx> JobState<'a, 'gctx> { } else { self.messages.push_bounded(Message::Diagnostic { id: self.id, - level, + level: level.to_string(), diag, fixable, }); diff --git a/src/cargo/core/compiler/job_queue/mod.rs b/src/cargo/core/compiler/job_queue/mod.rs index e36faa12fff..6c55697c1a0 100644 --- a/src/cargo/core/compiler/job_queue/mod.rs +++ b/src/cargo/core/compiler/job_queue/mod.rs @@ -513,7 +513,7 @@ impl<'gctx> JobQueue<'gctx> { .into_helper_thread(move |token| { messages.push(Message::Token(token)); }) - .with_context(|| "failed to create helper thread for jobserver management")?; + .context("failed to create helper thread for jobserver management")?; // Create a helper thread to manage the diagnostics for rustfix if // necessary. 
@@ -700,7 +700,7 @@ impl<'gctx> DrainState<'gctx> { .push(FutureIncompatReportPackage { package_id, items }); } Message::Token(acquired_token) => { - let token = acquired_token.with_context(|| "failed to acquire jobserver token")?; + let token = acquired_token.context("failed to acquire jobserver token")?; self.tokens.push(token); } } @@ -1050,7 +1050,7 @@ impl<'gctx> DrainState<'gctx> { if unit.is_local() { // Do not show this if there are any errors or no fixable warnings if let FixableWarnings::Positive(fixable) = count.fixable { - // `cargo fix` doesnt have an option for custom builds + // `cargo fix` doesn't have an option for custom builds if !unit.target.is_custom_build() { // To make sure the correct command is shown for `clippy` we // check if `RUSTC_WORKSPACE_WRAPPER` is set and pointing towards diff --git a/src/cargo/core/compiler/mod.rs b/src/cargo/core/compiler/mod.rs index 3b0510ac94f..ee5fdbbecd6 100644 --- a/src/cargo/core/compiler/mod.rs +++ b/src/cargo/core/compiler/mod.rs @@ -54,6 +54,7 @@ mod unit; pub mod unit_dependencies; pub mod unit_graph; +use std::borrow::Cow; use std::collections::{HashMap, HashSet}; use std::env; use std::ffi::{OsStr, OsString}; @@ -97,6 +98,7 @@ use crate::util::{add_path_args, internal}; use cargo_util::{paths, ProcessBuilder, ProcessError}; use cargo_util_schemas::manifest::TomlDebugInfo; use cargo_util_schemas::manifest::TomlTrimPaths; +use cargo_util_schemas::manifest::TomlTrimPathsValue; use rustfix::diagnostics::Applicability; const RUSTDOC_CRATE_VERSION_FLAG: &str = "--crate-version"; @@ -408,7 +410,17 @@ fn rustc( ) }, ) - .map_err(verbose_if_simple_exit_code) + .map_err(|e| { + if output_options.errors_seen == 0 { + // If we didn't expect an error, do not require --verbose to fail. + // This is intended to debug + // https://github.com/rust-lang/crater/issues/733, where we are seeing + // Cargo exit unsuccessfully while seeming to not show any errors. 
+ e + } else { + verbose_if_simple_exit_code(e) + } + }) .with_context(|| { // adapted from rustc_errors/src/lib.rs let warnings = match output_options.warnings_seen { @@ -683,10 +695,18 @@ fn prepare_rustc(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> CargoResult base.inherit_jobserver(&build_runner.jobserver); build_deps_args(&mut base, build_runner, unit)?; add_cap_lints(build_runner.bcx, unit, &mut base); - base.args(build_runner.bcx.rustflags_args(unit)); + if cargo_rustc_higher_args_precedence(build_runner) { + if let Some(args) = build_runner.bcx.extra_args_for(unit) { + base.args(args); + } + } + base.args(&unit.rustflags); if build_runner.bcx.gctx.cli_unstable().binary_dep_depinfo { base.arg("-Z").arg("binary-dep-depinfo"); } + if build_runner.bcx.gctx.cli_unstable().checksum_freshness { + base.arg("-Z").arg("checksum-hash-algorithm=blake3"); + } if is_primary { base.env("CARGO_PRIMARY_PACKAGE", "1"); @@ -732,14 +752,21 @@ fn prepare_rustdoc(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> CargoResu let doc_dir = build_runner.files().out_dir(unit); rustdoc.arg("-o").arg(&doc_dir); rustdoc.args(&features_args(unit)); - rustdoc.args(&check_cfg_args(build_runner, unit)); + rustdoc.args(&check_cfg_args(unit)); add_error_format_and_color(build_runner, &mut rustdoc); add_allow_features(build_runner, &mut rustdoc); + if let Some(trim_paths) = unit.profile.trim_paths.as_ref() { + trim_paths_args_rustdoc(&mut rustdoc, build_runner, unit, trim_paths)?; + } + rustdoc.args(unit.pkg.manifest().lint_rustflags()); - if let Some(args) = build_runner.bcx.extra_args_for(unit) { - rustdoc.args(args); + + if !cargo_rustc_higher_args_precedence(build_runner) { + if let Some(args) = build_runner.bcx.extra_args_for(unit) { + rustdoc.args(args); + } } let metadata = build_runner.metadata_for_doc_units[unit]; @@ -780,7 +807,12 @@ fn prepare_rustdoc(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> CargoResu rustdoc::add_output_format(build_runner, unit, &mut rustdoc)?; 
- rustdoc.args(bcx.rustdocflags_args(unit)); + if cargo_rustc_higher_args_precedence(build_runner) { + if let Some(args) = build_runner.bcx.extra_args_for(unit) { + rustdoc.args(args); + } + } + rustdoc.args(&unit.rustdocflags); if !crate_version_flag_already_present(&rustdoc) { append_crate_version_flag(unit, &mut rustdoc); @@ -1082,8 +1114,10 @@ fn build_base_args( cmd.args(unit.pkg.manifest().lint_rustflags()); cmd.args(&profile_rustflags); - if let Some(args) = build_runner.bcx.extra_args_for(unit) { - cmd.args(args); + if !cargo_rustc_higher_args_precedence(build_runner) { + if let Some(args) = build_runner.bcx.extra_args_for(unit) { + cmd.args(args); + } } // `-C overflow-checks` is implied by the setting of `-C debug-assertions`, @@ -1125,7 +1159,7 @@ fn build_base_args( } cmd.args(&features_args(unit)); - cmd.args(&check_cfg_args(build_runner, unit)); + cmd.args(&check_cfg_args(unit)); let meta = build_runner.files().metadata(unit); cmd.arg("-C").arg(&format!("metadata={}", meta)); @@ -1223,6 +1257,32 @@ fn features_args(unit: &Unit) -> Vec { args } +/// Like [`trim_paths_args`] but for rustdoc invocations. +fn trim_paths_args_rustdoc( + cmd: &mut ProcessBuilder, + build_runner: &BuildRunner<'_, '_>, + unit: &Unit, + trim_paths: &TomlTrimPaths, +) -> CargoResult<()> { + match trim_paths { + // rustdoc supports diagnostics trimming only. + TomlTrimPaths::Values(values) if !values.contains(&TomlTrimPathsValue::Diagnostics) => { + return Ok(()) + } + _ => {} + } + + // feature gate was checked during manifest/config parsing. + cmd.arg("-Zunstable-options"); + + // Order of `--remap-path-prefix` flags is important for `-Zbuild-std`. + // We want to show `/rustc//library/std` instead of `std-0.0.0`. + cmd.arg(package_remap(build_runner, unit)); + cmd.arg(sysroot_remap(build_runner, unit)); + + Ok(()) +} + /// Generates the `--remap-path-scope` and `--remap-path-prefix` for [RFC 3127]. /// See also unstable feature [`-Ztrim-paths`]. 
/// @@ -1242,125 +1302,120 @@ fn trim_paths_args( cmd.arg("-Zunstable-options"); cmd.arg(format!("-Zremap-path-scope={trim_paths}")); - let sysroot_remap = { - let sysroot = &build_runner.bcx.target_data.info(unit.kind).sysroot; - let mut remap = OsString::from("--remap-path-prefix="); - remap.push(sysroot); - remap.push("/lib/rustlib/src/rust"); // See also `detect_sysroot_src_path()`. - remap.push("="); - remap.push("/rustc/"); - // This remap logic aligns with rustc: - // - if let Some(commit_hash) = build_runner.bcx.rustc().commit_hash.as_ref() { - remap.push(commit_hash); - } else { - remap.push(build_runner.bcx.rustc().version.to_string()); - } - remap - }; - let package_remap = { - let pkg_root = unit.pkg.root(); - let ws_root = build_runner.bcx.ws.root(); - let mut remap = OsString::from("--remap-path-prefix="); - // Remap rules for dependencies - // - // * Git dependencies: remove ~/.cargo/git/checkouts prefix. - // * Registry dependencies: remove ~/.cargo/registry/src prefix. - // * Others (e.g. path dependencies): - // * relative paths to workspace root if inside the workspace directory. - // * otherwise remapped to `-`. - let source_id = unit.pkg.package_id().source_id(); - if source_id.is_git() { - remap.push( - build_runner - .bcx - .gctx - .git_checkouts_path() - .as_path_unlocked(), - ); - remap.push("="); - } else if source_id.is_registry() { - remap.push( - build_runner - .bcx - .gctx - .registry_source_path() - .as_path_unlocked(), - ); - remap.push("="); - } else if pkg_root.strip_prefix(ws_root).is_ok() { - remap.push(ws_root); - remap.push("=."); // remap to relative rustc work dir explicitly - } else { - remap.push(pkg_root); - remap.push("="); - remap.push(unit.pkg.name()); - remap.push("-"); - remap.push(unit.pkg.version().to_string()); - } - remap - }; - // Order of `--remap-path-prefix` flags is important for `-Zbuild-std`. // We want to show `/rustc//library/std` instead of `std-0.0.0`. 
- cmd.arg(package_remap); - cmd.arg(sysroot_remap); + cmd.arg(package_remap(build_runner, unit)); + cmd.arg(sysroot_remap(build_runner, unit)); Ok(()) } -/// Generates the `--check-cfg` arguments for the `unit`. -/// See unstable feature [`check-cfg`]. +/// Path prefix remap rules for sysroot. /// -/// [`check-cfg`]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#check-cfg -fn check_cfg_args(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> Vec { - if build_runner.bcx.gctx.cli_unstable().check_cfg { - // The routine below generates the --check-cfg arguments. Our goals here are to - // enable the checking of conditionals and pass the list of declared features. - // - // In the simplified case, it would resemble something like this: - // - // --check-cfg=cfg() --check-cfg=cfg(feature, values(...)) - // - // but having `cfg()` is redundant with the second argument (as well-known names - // and values are implicitly enabled when one or more `--check-cfg` argument is - // passed) so we don't emit it and just pass: - // - // --check-cfg=cfg(feature, values(...)) - // - // This way, even if there are no declared features, the config `feature` will - // still be expected, meaning users would get "unexpected value" instead of name. - // This wasn't always the case, see rust-lang#119930 for some details. - - let gross_cap_estimation = unit.pkg.summary().features().len() * 7 + 25; - let mut arg_feature = OsString::with_capacity(gross_cap_estimation); +/// This remap logic aligns with rustc: +/// +fn sysroot_remap(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> OsString { + let sysroot = &build_runner.bcx.target_data.info(unit.kind).sysroot; + let mut remap = OsString::from("--remap-path-prefix="); + remap.push(sysroot); + remap.push("/lib/rustlib/src/rust"); // See also `detect_sysroot_src_path()`. 
+ remap.push("="); + remap.push("/rustc/"); + if let Some(commit_hash) = build_runner.bcx.rustc().commit_hash.as_ref() { + remap.push(commit_hash); + } else { + remap.push(build_runner.bcx.rustc().version.to_string()); + } + remap +} - arg_feature.push("cfg(feature, values("); - for (i, feature) in unit.pkg.summary().features().keys().enumerate() { - if i != 0 { - arg_feature.push(", "); - } - arg_feature.push("\""); - arg_feature.push(feature); - arg_feature.push("\""); - } - arg_feature.push("))"); - - // We also include the `docsrs` cfg from the docs.rs service. We include it here - // (in Cargo) instead of rustc, since there is a much closer relationship between - // Cargo and docs.rs than rustc and docs.rs. In particular, all users of docs.rs use - // Cargo, but not all users of rustc (like Rust-for-Linux) use docs.rs. - - vec![ - OsString::from("-Zunstable-options"), - OsString::from("--check-cfg"), - OsString::from("cfg(docsrs)"), - OsString::from("--check-cfg"), - arg_feature, - ] +/// Path prefix remap rules for dependencies. +/// +/// * Git dependencies: remove `~/.cargo/git/checkouts` prefix. +/// * Registry dependencies: remove `~/.cargo/registry/src` prefix. +/// * Others (e.g. path dependencies): +/// * relative paths to workspace root if inside the workspace directory. +/// * otherwise remapped to `-`. 
+fn package_remap(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> OsString { + let pkg_root = unit.pkg.root(); + let ws_root = build_runner.bcx.ws.root(); + let mut remap = OsString::from("--remap-path-prefix="); + let source_id = unit.pkg.package_id().source_id(); + if source_id.is_git() { + remap.push( + build_runner + .bcx + .gctx + .git_checkouts_path() + .as_path_unlocked(), + ); + remap.push("="); + } else if source_id.is_registry() { + remap.push( + build_runner + .bcx + .gctx + .registry_source_path() + .as_path_unlocked(), + ); + remap.push("="); + } else if pkg_root.strip_prefix(ws_root).is_ok() { + remap.push(ws_root); + remap.push("=."); // remap to relative rustc work dir explicitly } else { - Vec::new() + remap.push(pkg_root); + remap.push("="); + remap.push(unit.pkg.name()); + remap.push("-"); + remap.push(unit.pkg.version().to_string()); } + remap +} + +/// Generates the `--check-cfg` arguments for the `unit`. +fn check_cfg_args(unit: &Unit) -> Vec { + // The routine below generates the --check-cfg arguments. Our goals here are to + // enable the checking of conditionals and pass the list of declared features. + // + // In the simplified case, it would resemble something like this: + // + // --check-cfg=cfg() --check-cfg=cfg(feature, values(...)) + // + // but having `cfg()` is redundant with the second argument (as well-known names + // and values are implicitly enabled when one or more `--check-cfg` argument is + // passed) so we don't emit it and just pass: + // + // --check-cfg=cfg(feature, values(...)) + // + // This way, even if there are no declared features, the config `feature` will + // still be expected, meaning users would get "unexpected value" instead of name. + // This wasn't always the case, see rust-lang#119930 for some details. 
+ + let gross_cap_estimation = unit.pkg.summary().features().len() * 7 + 25; + let mut arg_feature = OsString::with_capacity(gross_cap_estimation); + + arg_feature.push("cfg(feature, values("); + for (i, feature) in unit.pkg.summary().features().keys().enumerate() { + if i != 0 { + arg_feature.push(", "); + } + arg_feature.push("\""); + arg_feature.push(feature); + arg_feature.push("\""); + } + arg_feature.push("))"); + + // We also include the `docsrs` cfg from the docs.rs service. We include it here + // (in Cargo) instead of rustc, since there is a much closer relationship between + // Cargo and docs.rs than rustc and docs.rs. In particular, all users of docs.rs use + // Cargo, but not all users of rustc (like Rust-for-Linux) use docs.rs. + + vec![ + OsString::from("--check-cfg"), + OsString::from("cfg(docsrs)"), + OsString::from("--check-cfg"), + arg_feature, + ] } /// Adds LTO related codegen flags. @@ -1476,11 +1531,8 @@ fn add_custom_flags( for cfg in output.cfgs.iter() { cmd.arg("--cfg").arg(cfg); } - if !output.check_cfgs.is_empty() { - cmd.arg("-Zunstable-options"); - for check_cfg in &output.check_cfgs { - cmd.arg("--check-cfg").arg(check_cfg); - } + for check_cfg in &output.check_cfgs { + cmd.arg("--check-cfg").arg(check_cfg); } for (name, value) in output.env.iter() { cmd.env(name, value); @@ -1708,10 +1760,15 @@ fn on_stderr_line_inner( .. } => { #[derive(serde::Deserialize)] - struct CompilerMessage { + struct CompilerMessage<'a> { + // `rendered` contains escape sequences, which can't be + // zero-copy deserialized by serde_json. 
+ // See https://github.com/serde-rs/json/issues/742 rendered: String, - message: String, - level: String, + #[serde(borrow)] + message: Cow<'a, str>, + #[serde(borrow)] + level: Cow<'a, str>, children: Vec, } @@ -1734,7 +1791,8 @@ fn on_stderr_line_inner( suggestion_applicability: Option, } - if let Ok(mut msg) = serde_json::from_str::(compiler_message.get()) { + if let Ok(mut msg) = serde_json::from_str::>(compiler_message.get()) + { if msg.message.starts_with("aborting due to") || msg.message.ends_with("warning emitted") || msg.message.ends_with("warnings emitted") @@ -1760,7 +1818,7 @@ fn on_stderr_line_inner( }) .any(|b| b); count_diagnostic(&msg.level, options); - state.emit_diag(msg.level, rendered, machine_applicable)?; + state.emit_diag(&msg.level, rendered, machine_applicable)?; } return Ok(true); } @@ -1771,16 +1829,17 @@ fn on_stderr_line_inner( // cached replay to enable/disable colors without re-invoking rustc. MessageFormat::Json { ansi: false, .. } => { #[derive(serde::Deserialize, serde::Serialize)] - struct CompilerMessage { + struct CompilerMessage<'a> { rendered: String, - #[serde(flatten)] - other: std::collections::BTreeMap, + #[serde(flatten, borrow)] + other: std::collections::BTreeMap, serde_json::Value>, } - if let Ok(mut error) = serde_json::from_str::(compiler_message.get()) { + if let Ok(mut error) = + serde_json::from_str::>(compiler_message.get()) + { error.rendered = anstream::adapter::strip_str(&error.rendered).to_string(); let new_line = serde_json::to_string(&error)?; - let new_msg: Box = serde_json::from_str(&new_line)?; - compiler_message = new_msg; + compiler_message = serde_json::value::RawValue::from_string(new_line)?; } } @@ -1796,11 +1855,12 @@ fn on_stderr_line_inner( // Look for a matching directive and inform Cargo internally that a // metadata file has been produced. 
#[derive(serde::Deserialize)] - struct ArtifactNotification { - artifact: String, + struct ArtifactNotification<'a> { + #[serde(borrow)] + artifact: Cow<'a, str>, } - if let Ok(artifact) = serde_json::from_str::(compiler_message.get()) { + if let Ok(artifact) = serde_json::from_str::>(compiler_message.get()) { trace!("found directive from rustc: `{}`", artifact.artifact); if artifact.artifact.ends_with(".rmeta") { debug!("looks like metadata finished early!"); @@ -1818,11 +1878,22 @@ fn on_stderr_line_inner( } #[derive(serde::Deserialize)] - struct CompilerMessage { - level: String, + struct CompilerMessage<'a> { + #[serde(borrow)] + message: Cow<'a, str>, + #[serde(borrow)] + level: Cow<'a, str>, } - if let Ok(message) = serde_json::from_str::(compiler_message.get()) { - count_diagnostic(&message.level, options); + + if let Ok(msg) = serde_json::from_str::>(compiler_message.get()) { + if msg.message.starts_with("aborting due to") + || msg.message.ends_with("warning emitted") + || msg.message.ends_with("warnings emitted") + { + // Skip this line; we'll print our own summary at the end. + return Ok(true); + } + count_diagnostic(&msg.level, options); } let msg = machine_message::FromCompiler { @@ -1936,3 +2007,19 @@ fn scrape_output_path(build_runner: &BuildRunner<'_, '_>, unit: &Unit) -> CargoR .outputs(unit) .map(|outputs| outputs[0].path.clone()) } + +/// Provides a way to change the precedence of `cargo rustc -- `. +/// +/// This is intended to be a short-live function. 
+/// +/// See +fn cargo_rustc_higher_args_precedence(build_runner: &BuildRunner<'_, '_>) -> bool { + build_runner.bcx.gctx.nightly_features_allowed + && build_runner + .bcx + .gctx + .get_env("__CARGO_RUSTC_ORIG_ARGS_PRIO") + .ok() + .as_deref() + != Some("1") +} diff --git a/src/cargo/core/compiler/output_depinfo.rs b/src/cargo/core/compiler/output_depinfo.rs index 11d320c6a8c..d9efbaae3ff 100644 --- a/src/cargo/core/compiler/output_depinfo.rs +++ b/src/cargo/core/compiler/output_depinfo.rs @@ -61,7 +61,7 @@ fn add_deps_for_unit( build_runner.files().host_root(), &dep_info_loc, )? { - for path in paths.files { + for path in paths.files.into_keys() { deps.insert(path); } } else { @@ -154,7 +154,12 @@ pub fn output_depinfo(build_runner: &mut BuildRunner<'_, '_>, unit: &Unit) -> Ca // If nothing changed don't recreate the file which could alter // its mtime if let Ok(previous) = fingerprint::parse_rustc_dep_info(&output_path) { - if previous.files.iter().eq(deps.iter().map(Path::new)) { + if previous + .files + .iter() + .map(|(path, _checksum)| path) + .eq(deps.iter().map(Path::new)) + { continue; } } diff --git a/src/cargo/core/compiler/standard_lib.rs b/src/cargo/core/compiler/standard_lib.rs index 684e4426c04..d9847b5a8f3 100644 --- a/src/cargo/core/compiler/standard_lib.rs +++ b/src/cargo/core/compiler/standard_lib.rs @@ -1,18 +1,18 @@ //! Code for building the standard library. 
+use cargo_util_schemas::core::{PackageIdSpec, SourceKind}; use crate::core::compiler::unit_dependencies::IsArtifact; use crate::core::compiler::UnitInterner; use crate::core::compiler::{CompileKind, CompileMode, RustcTargetData, Unit}; use crate::core::profiles::{Profiles, UnitFor}; use crate::core::resolver::features::{CliFeatures, FeaturesFor, ResolvedFeatures}; use crate::core::resolver::HasDevUnits; -use crate::core::{Dependency, PackageId, PackageSet, Resolve, SourceId, Workspace}; +use crate::core::{GitReference, PackageId, PackageSet, Resolve, Workspace}; use crate::ops::{self, Packages}; use crate::util::errors::CargoResult; use crate::GlobalContext; use std::collections::{HashMap, HashSet}; use std::path::PathBuf; -use std::rc::Rc; use super::BuildConfig; @@ -74,60 +74,11 @@ pub fn resolve_std<'gctx>( } let src_path = detect_sysroot_src_path(target_data)?; - let to_patch = [ - "rustc-std-workspace-core", - "rustc-std-workspace-alloc", - "rustc-std-workspace-std", - ]; - let patches = to_patch - .iter() - .map(|&name| { - let source_path = SourceId::for_path(&src_path.join("library").join(name))?; - let dep = Dependency::parse(name, None, source_path)?; - Ok(dep) - }) - .collect::>>()?; - let crates_io_url = crate::sources::CRATES_IO_INDEX.parse().unwrap(); - let patch = HashMap::from([(crates_io_url, patches)]); - let members = vec![ - String::from("library/std"), - String::from("library/core"), - String::from("library/alloc"), - String::from("library/sysroot"), - ]; - let ws_config = crate::core::WorkspaceConfig::Root(crate::core::WorkspaceRootConfig::new( - &src_path, - &Some(members), - /*default_members*/ &None, - /*exclude*/ &None, - /*inheritable*/ &None, - /*custom_metadata*/ &None, - )); - let virtual_manifest = crate::core::VirtualManifest::new( - Rc::default(), - Rc::new(toml_edit::ImDocument::parse("".to_owned()).expect("empty is valid TOML")), - Rc::default(), - Rc::default(), - /*replace*/ Vec::new(), - patch, - ws_config, - 
crate::core::Features::default(), - None, - ); - + let std_ws_manifest_path = src_path.join("Cargo.toml"); let gctx = ws.gctx(); - // This is a delicate hack. In order for features to resolve correctly, - // the resolver needs to run a specific "current" member of the workspace. - // Thus, in order to set the features for `std`, we need to set `sysroot` - // to be the "current" member. `sysroot` is the root, and all other - // standard library crates are dependencies from there. Since none of the - // other crates need to alter their features, this should be fine, for - // now. Perhaps in the future features will be decoupled from the resolver - // and it will be easier to control feature selection. - let current_manifest = src_path.join("library/sysroot/Cargo.toml"); // TODO: Consider doing something to enforce --locked? Or to prevent the // lock file from being written, such as setting ephemeral. - let mut std_ws = Workspace::new_virtual(src_path, current_manifest, virtual_manifest, gctx)?; + let mut std_ws = Workspace::new(&std_ws_manifest_path, gctx)?; // Don't require optional dependencies in this workspace, aka std's own // `[dev-dependencies]`. No need for us to generate a `Resolve` which has // those included because we'll never use them anyway. 
@@ -137,7 +88,18 @@ pub fn resolve_std<'gctx>( let mut spec_pkgs = Vec::from(crates); spec_pkgs.push("sysroot".to_string()); let spec = Packages::Packages(spec_pkgs); - let specs = spec.to_package_id_specs(&std_ws)?; + let mut specs = spec.to_package_id_specs(&std_ws)?; + specs.push( + PackageIdSpec::new("compiler_builtins".into()) + .with_kind(SourceKind::Git(GitReference::Tag( + "solana-tools-v1.43".to_string(), + ))) + .with_url( + "https://github.com/solana-labs/compiler-builtins" + .parse() + .unwrap(), + ), + ); let features = match &gctx.cli_unstable().build_std_features { Some(list) => list.clone(), None => vec![ @@ -149,6 +111,7 @@ pub fn resolve_std<'gctx>( let cli_features = CliFeatures::from_command_line( &features, /*all_features*/ false, /*uses_default_features*/ false, )?; + let dry_run = false; let resolve = ops::resolve_ws_with_opts( &std_ws, target_data, @@ -157,6 +120,7 @@ pub fn resolve_std<'gctx>( &specs, HasDevUnits::No, crate::core::resolver::features::ForceAllTargets::No, + dry_run, )?; Ok(( resolve.pkg_set, @@ -176,6 +140,7 @@ pub fn generate_std_roots( package_set: &PackageSet<'_>, interner: &UnitInterner, profiles: &Profiles, + target_data: &RustcTargetData<'_>, ) -> CargoResult>> { // Generate the root Units for the standard library. 
let std_ids = crates @@ -214,6 +179,9 @@ pub fn generate_std_roots( *kind, mode, features.clone(), + target_data.info(*kind).rustflags.clone(), + target_data.info(*kind).rustdocflags.clone(), + target_data.target_config(*kind).links_overrides.clone(), /*is_std*/ true, /*dep_hash*/ 0, IsArtifact::No, @@ -236,7 +204,8 @@ fn detect_sysroot_src_path(target_data: &RustcTargetData<'_>) -> CargoResult getComputedStyle(document.body).getPropertyValue(name); +const TEXT_COLOR = getCssColor('--text'); +const BG_COLOR = getCssColor('--background'); +const CANVAS_BG = getCssColor('--canvas-background'); +const AXES_COLOR = getCssColor('--canvas-axes'); +const GRID_COLOR = getCssColor('--canvas-grid'); +const BLOCK_COLOR = getCssColor('--canvas-block'); +const CUSTOM_BUILD_COLOR = getCssColor('--canvas-custom-build'); +const NOT_CUSTOM_BUILD_COLOR = getCssColor('--canvas-not-custom-build'); +const DEP_LINE_COLOR = getCssColor('--canvas-dep-line'); +const DEP_LINE_HIGHLIGHTED_COLOR = getCssColor('--canvas-dep-line-highlighted'); +const CPU_COLOR = getCssColor('--canvas-cpu'); + for (let n=0; n 1) { ctx.beginPath(); ctx.fillStyle = cpuFillStyle; @@ -245,8 +261,8 @@ function render_timing_graph() { ctx.save(); ctx.translate(canvas_width-200, MARGIN); // background - ctx.fillStyle = '#fff'; - ctx.strokeStyle = '#000'; + ctx.fillStyle = BG_COLOR; + ctx.strokeStyle = TEXT_COLOR; ctx.lineWidth = 1; ctx.textBaseline = 'middle' ctx.textAlign = 'start'; @@ -255,7 +271,7 @@ function render_timing_graph() { ctx.stroke(); ctx.fill(); - ctx.fillStyle = '#000' + ctx.fillStyle = TEXT_COLOR; ctx.beginPath(); ctx.lineWidth = 2; ctx.strokeStyle = 'red'; @@ -282,7 +298,7 @@ function render_timing_graph() { ctx.fillStyle = cpuFillStyle ctx.fillRect(15, 60, 30, 15); ctx.fill(); - ctx.fillStyle = 'black'; + ctx.fillStyle = TEXT_COLOR; ctx.fillText('CPU Usage', 54, 71); ctx.restore(); @@ -311,12 +327,13 @@ function draw_graph_axes(id, graph_height) { const canvas_width = Math.max(graph_width + X_LINE 
+ 30, X_LINE + 250); const canvas_height = graph_height + MARGIN + Y_LINE; let ctx = setup_canvas(id, canvas_width, canvas_height); - ctx.fillStyle = '#f7f7f7'; + ctx.fillStyle = CANVAS_BG; ctx.fillRect(0, 0, canvas_width, canvas_height); ctx.lineWidth = 2; ctx.font = '16px sans-serif'; ctx.textAlign = 'center'; + ctx.strokeStyle = AXES_COLOR; // Draw main axes. ctx.beginPath(); @@ -327,7 +344,7 @@ function draw_graph_axes(id, graph_height) { // Draw X tick marks. const {step, tick_dist, num_ticks} = split_ticks(DURATION, px_per_sec, graph_width); - ctx.fillStyle = '#303030'; + ctx.fillStyle = AXES_COLOR; for (let n=0; n Timings<'gctx> { .sort_unstable_by(|a, b| a.start.partial_cmp(&b.start).unwrap()); if self.report_html { self.report_html(build_runner, error) - .with_context(|| "failed to save timing report")?; + .context("failed to save timing report")?; } Ok(()) } @@ -609,8 +609,64 @@ static HTML_TMPL: &str = r#" Cargo Build Timings β€” {ROOTS} diff --git a/src/cargo/core/compiler/unit.rs b/src/cargo/core/compiler/unit.rs index 4e96584c3e4..628b0b784ff 100644 --- a/src/cargo/core/compiler/unit.rs +++ b/src/cargo/core/compiler/unit.rs @@ -9,12 +9,14 @@ use crate::util::hex::short_hash; use crate::util::interning::InternedString; use crate::util::GlobalContext; use std::cell::RefCell; -use std::collections::HashSet; +use std::collections::{BTreeMap, HashSet}; use std::fmt; use std::hash::{Hash, Hasher}; use std::ops::Deref; use std::rc::Rc; +use super::BuildOutput; + /// All information needed to define a unit. /// /// A unit is an object that has enough information so that cargo knows how to build it. @@ -59,6 +61,34 @@ pub struct UnitInner { /// The `cfg` features to enable for this unit. /// This must be sorted. pub features: Vec, + /// Extra compiler flags to pass to `rustc` for a given unit. + /// + /// Although it depends on the caller, in the current Cargo implementation, + /// these flags take precedence over those from [`BuildContext::extra_args_for`]. 
+ /// + /// As of now, these flags come from environment variables and configurations. + /// See [`TargetInfo.rustflags`] for more on how Cargo collects them. + /// + /// [`BuildContext::extra_args_for`]: crate::core::compiler::build_context::BuildContext::extra_args_for + /// [`TargetInfo.rustflags`]: crate::core::compiler::build_context::TargetInfo::rustflags + pub rustflags: Rc<[String]>, + /// Extra compiler flags to pass to `rustdoc` for a given unit. + /// + /// Although it depends on the caller, in the current Cargo implementation, + /// these flags take precedence over those from [`BuildContext::extra_args_for`]. + /// + /// As of now, these flags come from environment variables and configurations. + /// See [`TargetInfo.rustdocflags`] for more on how Cargo collects them. + /// + /// [`BuildContext::extra_args_for`]: crate::core::compiler::build_context::BuildContext::extra_args_for + /// [`TargetInfo.rustdocflags`]: crate::core::compiler::build_context::TargetInfo::rustdocflags + pub rustdocflags: Rc<[String]>, + /// Build script override for the given library name. + /// + /// Any package with a `links` value for the given library name will skip + /// running its build script and instead use the given output from the + /// config file. + pub links_overrides: Rc>, // if `true`, the dependency is an artifact dependency, requiring special handling when // calculating output directories, linkage and environment variables provided to builds. 
pub artifact: IsArtifact, @@ -151,6 +181,9 @@ impl fmt::Debug for Unit { .field("kind", &self.kind) .field("mode", &self.mode) .field("features", &self.features) + .field("rustflags", &self.rustflags) + .field("rustdocflags", &self.rustdocflags) + .field("links_overrides", &self.links_overrides) .field("artifact", &self.artifact.is_true()) .field( "artifact_target_for_features", @@ -198,6 +231,9 @@ impl UnitInterner { kind: CompileKind, mode: CompileMode, features: Vec, + rustflags: Rc<[String]>, + rustdocflags: Rc<[String]>, + links_overrides: Rc>, is_std: bool, dep_hash: u64, artifact: IsArtifact, @@ -231,6 +267,9 @@ impl UnitInterner { kind, mode, features, + rustflags, + rustdocflags, + links_overrides, is_std, dep_hash, artifact, diff --git a/src/cargo/core/compiler/unit_dependencies.rs b/src/cargo/core/compiler/unit_dependencies.rs index 72f47c6491f..59e356fc682 100644 --- a/src/cargo/core/compiler/unit_dependencies.rs +++ b/src/cargo/core/compiler/unit_dependencies.rs @@ -24,7 +24,7 @@ use crate::core::compiler::unit_graph::{UnitDep, UnitGraph}; use crate::core::compiler::{ CompileKind, CompileMode, CrateType, RustcTargetData, Unit, UnitInterner, }; -use crate::core::dependency::{Artifact, ArtifactTarget, DepKind}; +use crate::core::dependency::{Artifact, ArtifactKind, ArtifactTarget, DepKind}; use crate::core::profiles::{Profile, Profiles, UnitFor}; use crate::core::resolver::features::{FeaturesFor, ResolvedFeatures}; use crate::core::resolver::Resolve; @@ -457,11 +457,7 @@ fn compute_deps_custom_build( state: &State<'_, '_>, ) -> CargoResult> { if let Some(links) = unit.pkg.manifest().links() { - if state - .target_data - .script_override(links, unit.kind) - .is_some() - { + if unit.links_overrides.get(links).is_some() { // Overridden build scripts don't have any dependencies. return Ok(Vec::new()); } @@ -555,17 +551,20 @@ fn artifact_targets_to_unit_deps( let ret = match_artifacts_kind_with_targets(dep, artifact_pkg.targets(), parent.pkg.name().as_str())? 
.into_iter() - .map(|(_artifact_kind, target)| target) - .flat_map(|target| { + .flat_map(|(artifact_kind, target)| { // We split target libraries into individual units, even though rustc is able - // to produce multiple kinds in an single invocation for the sole reason that + // to produce multiple kinds in a single invocation for the sole reason that // each artifact kind has its own output directory, something we can't easily // teach rustc for now. match target.kind() { TargetKind::Lib(kinds) => Box::new( kinds .iter() - .filter(|tk| matches!(tk, CrateType::Cdylib | CrateType::Staticlib)) + .filter(move |tk| match (tk, artifact_kind) { + (CrateType::Cdylib, ArtifactKind::Cdylib) => true, + (CrateType::Staticlib, ArtifactKind::Staticlib) => true, + _ => false, + }) .map(|target_kind| { new_unit_dep( state, @@ -856,6 +855,13 @@ fn new_unit_dep_with_profile( kind, mode, features, + state.target_data.info(kind).rustflags.clone(), + state.target_data.info(kind).rustdocflags.clone(), + state + .target_data + .target_config(kind) + .links_overrides + .clone(), state.is_std, /*dep_hash*/ 0, artifact.map_or(IsArtifact::No, |_| IsArtifact::Yes), diff --git a/src/cargo/core/dependency.rs b/src/cargo/core/dependency.rs index ddd43e1f59d..2ec2aa1a328 100644 --- a/src/cargo/core/dependency.rs +++ b/src/cargo/core/dependency.rs @@ -5,11 +5,11 @@ use serde::Serialize; use std::borrow::Cow; use std::fmt; use std::path::PathBuf; -use std::rc::Rc; +use std::sync::Arc; use tracing::trace; use crate::core::compiler::{CompileKind, CompileTarget}; -use crate::core::{PackageId, SourceId, Summary}; +use crate::core::{CliUnstable, Feature, Features, PackageId, SourceId, Summary}; use crate::util::errors::CargoResult; use crate::util::interning::InternedString; use crate::util::OptVersionReq; @@ -18,7 +18,7 @@ use crate::util::OptVersionReq; /// Cheap to copy. 
#[derive(PartialEq, Eq, Hash, Clone, Debug)] pub struct Dependency { - inner: Rc, + inner: Arc, } /// The data underlying a `Dependency`. @@ -52,50 +52,32 @@ struct Inner { } #[derive(Serialize)] -struct SerializedDependency<'a> { - name: &'a str, +pub struct SerializedDependency { + name: InternedString, source: SourceId, req: String, kind: DepKind, - rename: Option<&'a str>, + rename: Option, optional: bool, uses_default_features: bool, - features: &'a [InternedString], + features: Vec, #[serde(skip_serializing_if = "Option::is_none")] - artifact: Option<&'a Artifact>, - target: Option<&'a Platform>, + artifact: Option, + target: Option, /// The registry URL this dependency is from. /// If None, then it comes from the default registry (crates.io). - registry: Option<&'a str>, + registry: Option, /// The file system path for a local path dependency. #[serde(skip_serializing_if = "Option::is_none")] path: Option, -} -impl ser::Serialize for Dependency { - fn serialize(&self, s: S) -> Result - where - S: ser::Serializer, - { - let registry_id = self.registry_id(); - SerializedDependency { - name: &*self.package_name(), - source: self.source_id(), - req: self.version_req().to_string(), - kind: self.kind(), - optional: self.is_optional(), - uses_default_features: self.uses_default_features(), - features: self.features(), - target: self.platform(), - rename: self.explicit_name_in_toml().map(|s| s.as_str()), - registry: registry_id.as_ref().map(|sid| sid.url().as_str()), - path: self.source_id().local_path(), - artifact: self.artifact(), - } - .serialize(s) - } + /// `public` flag is unset if `-Zpublic-dependency` is not enabled + /// + /// Once that feature is stabilized, `public` will not need to be `Option` + #[serde(skip_serializing_if = "Option::is_none")] + public: Option, } #[derive(PartialEq, Eq, Hash, Ord, PartialOrd, Clone, Debug, Copy)] @@ -152,7 +134,7 @@ impl Dependency { let mut ret = Dependency::new_override(name, source_id); { - let ptr = 
Rc::make_mut(&mut ret.inner); + let ptr = Arc::make_mut(&mut ret.inner); ptr.only_match_name = false; ptr.req = version_req; ptr.specified_req = specified_req; @@ -163,7 +145,7 @@ impl Dependency { pub fn new_override(name: InternedString, source_id: SourceId) -> Dependency { assert!(!name.is_empty()); Dependency { - inner: Rc::new(Inner { + inner: Arc::new(Inner { name, source_id, registry_id: None, @@ -182,6 +164,34 @@ impl Dependency { } } + pub fn serialized( + &self, + unstable_flags: &CliUnstable, + features: &Features, + ) -> SerializedDependency { + SerializedDependency { + name: self.package_name(), + source: self.source_id(), + req: self.version_req().to_string(), + kind: self.kind(), + optional: self.is_optional(), + uses_default_features: self.uses_default_features(), + features: self.features().to_vec(), + target: self.inner.platform.clone(), + rename: self.explicit_name_in_toml(), + registry: self.registry_id().as_ref().map(|sid| sid.url().to_string()), + path: self.source_id().local_path(), + artifact: self.inner.artifact.clone(), + public: if unstable_flags.public_dependency + || features.is_enabled(Feature::public_dependency()) + { + Some(self.inner.public) + } else { + None + }, + } + } + pub fn version_req(&self) -> &OptVersionReq { &self.inner.req } @@ -241,7 +251,7 @@ impl Dependency { } pub fn set_registry_id(&mut self, registry_id: SourceId) -> &mut Dependency { - Rc::make_mut(&mut self.inner).registry_id = Some(registry_id); + Arc::make_mut(&mut self.inner).registry_id = Some(registry_id); self } @@ -259,7 +269,7 @@ impl Dependency { // Setting 'public' only makes sense for normal dependencies assert_eq!(self.kind(), DepKind::Normal); } - Rc::make_mut(&mut self.inner).public = public; + Arc::make_mut(&mut self.inner).public = public; self } @@ -286,7 +296,7 @@ impl Dependency { // Setting 'public' only makes sense for normal dependencies assert_eq!(kind, DepKind::Normal); } - Rc::make_mut(&mut self.inner).kind = kind; + Arc::make_mut(&mut 
self.inner).kind = kind; self } @@ -295,36 +305,36 @@ impl Dependency { &mut self, features: impl IntoIterator>, ) -> &mut Dependency { - Rc::make_mut(&mut self.inner).features = features.into_iter().map(|s| s.into()).collect(); + Arc::make_mut(&mut self.inner).features = features.into_iter().map(|s| s.into()).collect(); self } /// Sets whether the dependency requests default features of the package. pub fn set_default_features(&mut self, default_features: bool) -> &mut Dependency { - Rc::make_mut(&mut self.inner).default_features = default_features; + Arc::make_mut(&mut self.inner).default_features = default_features; self } /// Sets whether the dependency is optional. pub fn set_optional(&mut self, optional: bool) -> &mut Dependency { - Rc::make_mut(&mut self.inner).optional = optional; + Arc::make_mut(&mut self.inner).optional = optional; self } /// Sets the source ID for this dependency. pub fn set_source_id(&mut self, id: SourceId) -> &mut Dependency { - Rc::make_mut(&mut self.inner).source_id = id; + Arc::make_mut(&mut self.inner).source_id = id; self } /// Sets the version requirement for this dependency. 
pub fn set_version_req(&mut self, req: OptVersionReq) -> &mut Dependency { - Rc::make_mut(&mut self.inner).req = req; + Arc::make_mut(&mut self.inner).req = req; self } pub fn set_platform(&mut self, platform: Option) -> &mut Dependency { - Rc::make_mut(&mut self.inner).platform = platform; + Arc::make_mut(&mut self.inner).platform = platform; self } @@ -332,7 +342,7 @@ impl Dependency { &mut self, name: impl Into, ) -> &mut Dependency { - Rc::make_mut(&mut self.inner).explicit_name_in_toml = Some(name.into()); + Arc::make_mut(&mut self.inner).explicit_name_in_toml = Some(name.into()); self } @@ -346,7 +356,7 @@ impl Dependency { self.source_id(), id ); - let me = Rc::make_mut(&mut self.inner); + let me = Arc::make_mut(&mut self.inner); me.req.lock_to(id.version()); // Only update the `precise` of this source to preserve other @@ -361,7 +371,7 @@ impl Dependency { /// Mainly used in dependency patching like `[patch]` or `[replace]`, which /// doesn't need to lock the entire dependency to a specific [`PackageId`]. pub fn lock_version(&mut self, version: &semver::Version) -> &mut Dependency { - let me = Rc::make_mut(&mut self.inner); + let me = Arc::make_mut(&mut self.inner); me.req.lock_to(version); self } @@ -430,7 +440,7 @@ impl Dependency { } pub(crate) fn set_artifact(&mut self, artifact: Artifact) { - Rc::make_mut(&mut self.inner).artifact = Some(artifact); + Arc::make_mut(&mut self.inner).artifact = Some(artifact); } pub(crate) fn artifact(&self) -> Option<&Artifact> { @@ -453,7 +463,7 @@ impl Dependency { /// This information represents a requirement in the package this dependency refers to. 
#[derive(PartialEq, Eq, Hash, Clone, Debug)] pub struct Artifact { - inner: Rc>, + inner: Arc>, is_lib: bool, target: Option, } @@ -492,7 +502,7 @@ impl Artifact { .collect::, _>>()?, )?; Ok(Artifact { - inner: Rc::new(kinds), + inner: Arc::new(kinds), is_lib, target: target.map(ArtifactTarget::parse).transpose()?, }) diff --git a/src/cargo/core/features.rs b/src/cargo/core/features.rs index e4d559c963c..2638ff95234 100644 --- a/src/cargo/core/features.rs +++ b/src/cargo/core/features.rs @@ -27,7 +27,7 @@ //! * Good for: `.cargo/config.toml`, `config.json` index file (gate: `-Z`) //! //! For features that touch multiple parts of Cargo, multiple feature gating strategies (error, -//! warn, ignore) and mechnisms (`-Z`, `cargo-features`) may be used. +//! warn, ignore) and mechanisms (`-Z`, `cargo-features`) may be used. //! //! When adding new tests for your feature, usually the tests should go into a //! new module of the testsuite named after the feature. See @@ -343,7 +343,7 @@ impl FromStr for Edition { } } -#[derive(PartialEq)] +#[derive(Debug, PartialEq)] enum Status { Stable, Unstable, @@ -387,11 +387,11 @@ macro_rules! features { $( $(#[$attr])* #[doc = concat!("\n\n\nSee .")] - pub fn $feature() -> &'static Feature { + pub const fn $feature() -> &'static Feature { fn get(features: &Features) -> bool { stab!($stab) == Status::Stable || features.$feature } - static FEAT: Feature = Feature { + const FEAT: Feature = Feature { name: stringify!($feature), stability: stab!($stab), version: $version, @@ -406,6 +406,10 @@ macro_rules! features { fn is_enabled(&self, features: &Features) -> bool { (self.get)(features) } + + pub(crate) fn name(&self) -> &str { + self.name + } } impl Features { @@ -509,11 +513,15 @@ features! { /// Allow multiple packages to participate in the same API namespace (unstable, open_namespaces, "", "reference/unstable.html#open-namespaces"), + + /// Allow paths that resolve relatively to a base specified in the config. 
+ (unstable, path_bases, "", "reference/unstable.html#path-bases"), } /// Status and metadata for a single unstable feature. +#[derive(Debug)] pub struct Feature { - /// Feature name. This is valid Rust identifer so no dash only underscore. + /// Feature name. This is valid Rust identifier so no dash only underscore. name: &'static str, stability: Status, /// Version that this feature was stabilized or removed. @@ -728,10 +736,9 @@ macro_rules! unstable_cli_options { ); let mut expected = vec![$(stringify!($element)),*]; expected[2..].sort(); - snapbox::assert_eq( - format!("{:#?}", expected), - format!("{:#?}", vec![$(stringify!($element)),*]) - ); + let expected = format!("{:#?}", expected); + let actual = format!("{:#?}", vec![$(stringify!($element)),*]); + snapbox::assert_data_eq!(actual, expected); } } } @@ -753,7 +760,7 @@ unstable_cli_options!( build_std: Option> = ("Enable Cargo to compile the standard library itself as part of a crate graph compilation"), build_std_features: Option> = ("Configure features enabled for the standard library itself when building the standard library"), cargo_lints: bool = ("Enable the `[lints.cargo]` table"), - check_cfg: bool = ("Enable compile-time checking of `cfg` names/values/features"), + checksum_freshness: bool = ("Use a checksum to determine if output is fresh rather than filesystem mtime"), codegen_backend: bool = ("Enable the `codegen-backend` option in profiles in .cargo/config.toml file"), config_include: bool = ("Enable the `include` key in config files"), direct_minimal_versions: bool = ("Resolve minimal dependency versions instead of maximum (direct dependencies only)"), @@ -761,7 +768,9 @@ unstable_cli_options!( dual_proc_macros: bool = ("Build proc-macros for both the host and the target"), features: Option>, gc: bool = ("Track cache usage and \"garbage collect\" unused files"), + #[serde(deserialize_with = "deserialize_git_features")] git: Option = ("Enable support for shallow git fetch operations"), + 
#[serde(deserialize_with = "deserialize_gitoxide_features")] gitoxide: Option = ("Use gitoxide for the given git interactions, or all of them if no argument is given"), host_config: bool = ("Enable the `[host]` section in the .cargo/config.toml file"), minimal_versions: bool = ("Resolve minimal dependency versions instead of maximum"), @@ -769,6 +778,7 @@ unstable_cli_options!( mtime_on_use: bool = ("Configure Cargo to update the mtime of used files"), next_lockfile_bump: bool, no_index_update: bool = ("Do not update the registry index even if the cache is outdated"), + package_workspace: bool = ("Handle intra-workspace dependencies when packaging"), panic_abort_tests: bool = ("Enable support to run tests with -Cpanic=abort"), profile_rustflags: bool = ("Enable the `rustflags` option in profiles in .cargo/config.toml file"), public_dependency: bool = ("Respect a dependency's `public` field in Cargo.toml to control public/private dependencies"), @@ -855,6 +865,9 @@ const STABILIZED_REGISTRY_AUTH: &str = const STABILIZED_LINTS: &str = "The `[lints]` table is now always available."; +const STABILIZED_CHECK_CFG: &str = + "Compile-time checking of conditional (a.k.a. `-Zcheck-cfg`) is now always enabled."; + fn deserialize_build_std<'de, D>(deserializer: D) -> Result>, D::Error> where D: serde::Deserializer<'de>, @@ -868,7 +881,8 @@ where )) } -#[derive(Debug, Copy, Clone, Default, Deserialize)] +#[derive(Debug, Copy, Clone, Default, Deserialize, Ord, PartialOrd, Eq, PartialEq)] +#[serde(default)] pub struct GitFeatures { /// When cloning the index, perform a shallow clone. Maintain shallowness upon subsequent fetches. 
pub shallow_index: bool, @@ -877,12 +891,71 @@ pub struct GitFeatures { } impl GitFeatures { - fn all() -> Self { + pub fn all() -> Self { GitFeatures { shallow_index: true, shallow_deps: true, } } + + fn expecting() -> String { + let fields = vec!["`shallow-index`", "`shallow-deps`"]; + format!( + "unstable 'git' only takes {} as valid inputs", + fields.join(" and ") + ) + } +} + +fn deserialize_git_features<'de, D>(deserializer: D) -> Result, D::Error> +where + D: serde::de::Deserializer<'de>, +{ + struct GitFeaturesVisitor; + + impl<'de> serde::de::Visitor<'de> for GitFeaturesVisitor { + type Value = Option; + + fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + formatter.write_str(&GitFeatures::expecting()) + } + + fn visit_bool(self, v: bool) -> Result + where + E: serde::de::Error, + { + if v { + Ok(Some(GitFeatures::all())) + } else { + Ok(None) + } + } + + fn visit_str(self, s: &str) -> Result + where + E: serde::de::Error, + { + Ok(parse_git(s.split(",")).map_err(serde::de::Error::custom)?) 
+ } + + fn visit_some(self, deserializer: D) -> Result + where + D: serde::de::Deserializer<'de>, + { + let git = GitFeatures::deserialize(deserializer)?; + Ok(Some(git)) + } + + fn visit_map(self, map: V) -> Result + where + V: serde::de::MapAccess<'de>, + { + let mvd = serde::de::value::MapAccessDeserializer::new(map); + Ok(Some(GitFeatures::deserialize(mvd)?)) + } + } + + deserializer.deserialize_any(GitFeaturesVisitor) } fn parse_git(it: impl Iterator>) -> CargoResult> { @@ -897,16 +970,15 @@ fn parse_git(it: impl Iterator>) -> CargoResult *shallow_index = true, "shallow-deps" => *shallow_deps = true, _ => { - bail!( - "unstable 'git' only takes 'shallow-index' and 'shallow-deps' as valid inputs" - ) + bail!(GitFeatures::expecting()) } } } Ok(Some(out)) } -#[derive(Debug, Copy, Clone, Default, Deserialize)] +#[derive(Debug, Copy, Clone, Default, Deserialize, Ord, PartialOrd, Eq, PartialEq)] +#[serde(default)] pub struct GitoxideFeatures { /// All fetches are done with `gitoxide`, which includes git dependencies as well as the crates index. 
pub fetch: bool, @@ -920,7 +992,7 @@ pub struct GitoxideFeatures { } impl GitoxideFeatures { - fn all() -> Self { + pub fn all() -> Self { GitoxideFeatures { fetch: true, checkout: true, @@ -937,6 +1009,67 @@ impl GitoxideFeatures { internal_use_git2: false, } } + + fn expecting() -> String { + let fields = vec!["`fetch`", "`checkout`", "`internal-use-git2`"]; + format!( + "unstable 'gitoxide' only takes {} as valid inputs, for shallow fetches see `-Zgit=shallow-index,shallow-deps`", + fields.join(" and ") + ) + } +} + +fn deserialize_gitoxide_features<'de, D>( + deserializer: D, +) -> Result, D::Error> +where + D: serde::de::Deserializer<'de>, +{ + struct GitoxideFeaturesVisitor; + + impl<'de> serde::de::Visitor<'de> for GitoxideFeaturesVisitor { + type Value = Option; + + fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result { + formatter.write_str(&GitoxideFeatures::expecting()) + } + + fn visit_str(self, s: &str) -> Result + where + E: serde::de::Error, + { + Ok(parse_gitoxide(s.split(",")).map_err(serde::de::Error::custom)?) 
+ } + + fn visit_bool(self, v: bool) -> Result + where + E: serde::de::Error, + { + if v { + Ok(Some(GitoxideFeatures::all())) + } else { + Ok(None) + } + } + + fn visit_some(self, deserializer: D) -> Result + where + D: serde::de::Deserializer<'de>, + { + let gitoxide = GitoxideFeatures::deserialize(deserializer)?; + Ok(Some(gitoxide)) + } + + fn visit_map(self, map: V) -> Result + where + V: serde::de::MapAccess<'de>, + { + let mvd = serde::de::value::MapAccessDeserializer::new(map); + Ok(Some(GitoxideFeatures::deserialize(mvd)?)) + } + } + + deserializer.deserialize_any(GitoxideFeaturesVisitor) } fn parse_gitoxide( @@ -955,7 +1088,7 @@ fn parse_gitoxide( "checkout" => *checkout = true, "internal-use-git2" => *internal_use_git2 = true, _ => { - bail!("unstable 'gitoxide' only takes `fetch` and 'checkout' as valid input, for shallow fetches see `-Zgit=shallow-index,shallow-deps`") + bail!(GitoxideFeatures::expecting()) } } } @@ -1109,6 +1242,7 @@ impl CliUnstable { "credential-process" => stabilized_warn(k, "1.74", STABILIZED_CREDENTIAL_PROCESS), "lints" => stabilized_warn(k, "1.74", STABILIZED_LINTS), "registry-auth" => stabilized_warn(k, "1.74", STABILIZED_REGISTRY_AUTH), + "check-cfg" => stabilized_warn(k, "1.80", STABILIZED_CHECK_CFG), // Unstable features // Sorted alphabetically: @@ -1122,9 +1256,6 @@ impl CliUnstable { } "build-std-features" => self.build_std_features = Some(parse_features(v)), "cargo-lints" => self.cargo_lints = parse_empty(k, v)?, - "check-cfg" => { - self.check_cfg = parse_empty(k, v)?; - } "codegen-backend" => self.codegen_backend = parse_empty(k, v)?, "config-include" => self.config_include = parse_empty(k, v)?, "direct-minimal-versions" => self.direct_minimal_versions = parse_empty(k, v)?, @@ -1150,6 +1281,7 @@ impl CliUnstable { // can also be set in .cargo/config or with and ENV "mtime-on-use" => self.mtime_on_use = parse_empty(k, v)?, "no-index-update" => self.no_index_update = parse_empty(k, v)?, + "package-workspace" => 
self.package_workspace= parse_empty(k, v)?, "panic-abort-tests" => self.panic_abort_tests = parse_empty(k, v)?, "public-dependency" => self.public_dependency = parse_empty(k, v)?, "profile-rustflags" => self.profile_rustflags = parse_empty(k, v)?, @@ -1158,6 +1290,7 @@ impl CliUnstable { "rustdoc-map" => self.rustdoc_map = parse_empty(k, v)?, "rustdoc-scrape-examples" => self.rustdoc_scrape_examples = parse_empty(k, v)?, "separate-nightlies" => self.separate_nightlies = parse_empty(k, v)?, + "checksum-freshness" => self.checksum_freshness = parse_empty(k, v)?, "skip-rustdoc-fingerprint" => self.skip_rustdoc_fingerprint = parse_empty(k, v)?, "script" => self.script = parse_empty(k, v)?, "target-applies-to-host" => self.target_applies_to_host = parse_empty(k, v)?, diff --git a/src/cargo/core/global_cache_tracker.rs b/src/cargo/core/global_cache_tracker.rs index 9add10f26a1..b6cb7a60fdb 100644 --- a/src/cargo/core/global_cache_tracker.rs +++ b/src/cargo/core/global_cache_tracker.rs @@ -543,7 +543,7 @@ impl GlobalCacheTracker { /// Deletes files from the global cache based on the given options. pub fn clean(&mut self, clean_ctx: &mut CleanContext<'_>, gc_opts: &GcOpts) -> CargoResult<()> { self.clean_inner(clean_ctx, gc_opts) - .with_context(|| "failed to clean entries from the global cache") + .context("failed to clean entries from the global cache") } #[tracing::instrument(skip_all)] @@ -575,7 +575,7 @@ impl GlobalCacheTracker { gc_opts.is_download_cache_size_set(), &mut delete_paths, ) - .with_context(|| "failed to sync tracking database")? + .context("failed to sync tracking database")? } if let Some(max_age) = gc_opts.max_index_age { let max_age = now - max_age.as_secs(); @@ -1799,7 +1799,8 @@ pub fn is_silent_error(e: &anyhow::Error) -> bool { } /// Returns the disk usage for a git checkout directory. 
-pub fn du_git_checkout(path: &Path) -> CargoResult { +#[tracing::instrument] +fn du_git_checkout(path: &Path) -> CargoResult { // !.git is used because clones typically use hardlinks for the git // contents. TODO: Verify behavior on Windows. // TODO: Or even better, switch to worktrees, and remove this. diff --git a/src/cargo/core/manifest.rs b/src/cargo/core/manifest.rs index b2133b204e2..d63dbd61de3 100644 --- a/src/cargo/core/manifest.rs +++ b/src/cargo/core/manifest.rs @@ -64,7 +64,7 @@ pub struct Manifest { contents: Rc, document: Rc>, original_toml: Rc, - resolved_toml: Rc, + normalized_toml: Rc, summary: Summary, // this form of manifest: @@ -110,7 +110,7 @@ pub struct VirtualManifest { contents: Rc, document: Rc>, original_toml: Rc, - resolved_toml: Rc, + normalized_toml: Rc, // this form of manifest: replace: Vec<(PackageIdSpec, Dependency)>, @@ -422,7 +422,7 @@ impl Manifest { contents: Rc, document: Rc>, original_toml: Rc, - resolved_toml: Rc, + normalized_toml: Rc, summary: Summary, default_kind: Option, @@ -451,7 +451,7 @@ impl Manifest { contents, document, original_toml, - resolved_toml, + normalized_toml, summary, default_kind, @@ -483,8 +483,9 @@ impl Manifest { pub fn contents(&self) -> &str { self.contents.as_str() } - pub fn to_resolved_contents(&self) -> CargoResult { - let toml = toml::to_string_pretty(self.resolved_toml())?; + /// See [`Manifest::normalized_toml`] for what "normalized" means + pub fn to_normalized_contents(&self) -> CargoResult { + let toml = toml::to_string_pretty(self.normalized_toml())?; Ok(format!("{}\n{}", MANIFEST_PREAMBLE, toml)) } /// Collection of spans for the original TOML @@ -496,8 +497,13 @@ impl Manifest { &self.original_toml } /// The [`TomlManifest`] with all fields expanded - pub fn resolved_toml(&self) -> &TomlManifest { - &self.resolved_toml + /// + /// This is the intersection of what fields need resolving for cargo-publish that also are + /// useful for the operation of cargo, including + /// - workspace 
inheritance + /// - target discovery + pub fn normalized_toml(&self) -> &TomlManifest { + &self.normalized_toml } pub fn summary(&self) -> &Summary { &self.summary @@ -547,7 +553,7 @@ impl Manifest { &self.warnings } pub fn profiles(&self) -> Option<&TomlProfiles> { - self.resolved_toml.profile.as_ref() + self.normalized_toml.profile.as_ref() } pub fn publish(&self) -> &Option> { &self.publish @@ -658,7 +664,7 @@ impl VirtualManifest { contents: Rc, document: Rc>, original_toml: Rc, - resolved_toml: Rc, + normalized_toml: Rc, replace: Vec<(PackageIdSpec, Dependency)>, patch: HashMap>, workspace: WorkspaceConfig, @@ -669,7 +675,7 @@ impl VirtualManifest { contents, document, original_toml, - resolved_toml, + normalized_toml, replace, patch, workspace, @@ -692,8 +698,8 @@ impl VirtualManifest { &self.original_toml } /// The [`TomlManifest`] with all fields expanded - pub fn resolved_toml(&self) -> &TomlManifest { - &self.resolved_toml + pub fn normalized_toml(&self) -> &TomlManifest { + &self.normalized_toml } pub fn replace(&self) -> &[(PackageIdSpec, Dependency)] { @@ -709,7 +715,7 @@ impl VirtualManifest { } pub fn profiles(&self) -> Option<&TomlProfiles> { - self.resolved_toml.profile.as_ref() + self.normalized_toml.profile.as_ref() } pub fn warnings_mut(&mut self) -> &mut Warnings { @@ -899,7 +905,7 @@ impl Target { pub fn documented(&self) -> bool { self.inner.doc } - // A plugin, proc-macro, or build-script. + // A proc-macro or build-script. 
pub fn for_host(&self) -> bool { self.inner.for_host } diff --git a/src/cargo/core/mod.rs b/src/cargo/core/mod.rs index bdc9b2f4fe6..e857ca35dad 100644 --- a/src/cargo/core/mod.rs +++ b/src/cargo/core/mod.rs @@ -1,4 +1,4 @@ -pub use self::dependency::Dependency; +pub use self::dependency::{Dependency, SerializedDependency}; pub use self::features::{CliUnstable, Edition, Feature, Features}; pub use self::manifest::{EitherManifest, VirtualManifest}; pub use self::manifest::{Manifest, Target, TargetKind}; diff --git a/src/cargo/core/package.rs b/src/cargo/core/package.rs index 8cec4a489a8..ac1bcdc5cb7 100644 --- a/src/cargo/core/package.rs +++ b/src/cargo/core/package.rs @@ -22,7 +22,10 @@ use crate::core::compiler::{CompileKind, RustcTargetData}; use crate::core::dependency::DepKind; use crate::core::resolver::features::ForceAllTargets; use crate::core::resolver::{HasDevUnits, Resolve}; -use crate::core::{Dependency, Manifest, PackageId, PackageIdSpec, SourceId, Target}; +use crate::core::{ + CliUnstable, Dependency, Features, Manifest, PackageId, PackageIdSpec, SerializedDependency, + SourceId, Target, +}; use crate::core::{Summary, Workspace}; use crate::sources::source::{MaybePackage, SourceMap}; use crate::util::cache_lock::{CacheLock, CacheLockMode}; @@ -73,7 +76,7 @@ pub struct SerializedPackage { license_file: Option, description: Option, source: SourceId, - dependencies: Vec, + dependencies: Vec, targets: Vec, features: BTreeMap>, manifest_path: PathBuf, @@ -188,7 +191,11 @@ impl Package { self.targets().iter().any(|t| t.is_example() || t.is_bin()) } - pub fn serialized(&self) -> SerializedPackage { + pub fn serialized( + &self, + unstable_flags: &CliUnstable, + cargo_features: &Features, + ) -> SerializedPackage { let summary = self.manifest().summary(); let package_id = summary.package_id(); let manmeta = self.manifest().metadata(); @@ -203,7 +210,7 @@ impl Package { .cloned() .collect(); // Convert Vec to Vec - let features = summary + let crate_features = 
summary .features() .iter() .map(|(k, v)| { @@ -224,9 +231,13 @@ impl Package { license_file: manmeta.license_file.clone(), description: manmeta.description.clone(), source: summary.source_id(), - dependencies: summary.dependencies().to_vec(), + dependencies: summary + .dependencies() + .iter() + .map(|dep| dep.serialized(unstable_flags, cargo_features)) + .collect(), targets, - features, + features: crate_features, manifest_path: self.manifest_path().to_path_buf(), metadata: self.manifest().custom_metadata().cloned(), authors: manmeta.authors.clone(), @@ -393,7 +404,7 @@ impl<'gctx> PackageSet<'gctx> { let multiplexing = gctx.http_config()?.multiplexing.unwrap_or(true); multi .pipelining(false, multiplexing) - .with_context(|| "failed to enable multiplexing/pipelining in curl")?; + .context("failed to enable multiplexing/pipelining in curl")?; // let's not flood crates.io with connections multi.set_max_host_connections(2)?; @@ -654,6 +665,7 @@ impl<'a, 'gctx> Downloads<'a, 'gctx> { /// Returns `None` if the package is queued up for download and will /// eventually be returned from `wait_for_download`. Returns `Some(pkg)` if /// the package is ready and doesn't need to be downloaded. + #[tracing::instrument(skip_all)] pub fn start(&mut self, id: PackageId) -> CargoResult> { self.start_inner(id) .with_context(|| format!("failed to download `{}`", id)) @@ -680,7 +692,7 @@ impl<'a, 'gctx> Downloads<'a, 'gctx> { .ok_or_else(|| internal(format!("couldn't find source for `{}`", id)))?; let pkg = source .download(id) - .with_context(|| "unable to get packages from source")?; + .context("unable to get packages from source")?; let (url, descriptor, authorization) = match pkg { MaybePackage::Ready(pkg) => { debug!("{} doesn't need a download", id); @@ -793,6 +805,7 @@ impl<'a, 'gctx> Downloads<'a, 'gctx> { /// # Panics /// /// This function will panic if there are no remaining downloads. 
+ #[tracing::instrument(skip_all)] pub fn wait(&mut self) -> CargoResult<&'a Package> { let (dl, data) = loop { assert_eq!(self.pending.len(), self.pending_ids.len()); @@ -949,7 +962,7 @@ impl<'a, 'gctx> Downloads<'a, 'gctx> { self.set .multi .perform() - .with_context(|| "failed to perform http requests") + .context("failed to perform http requests") })?; debug!(target: "network", "handles remaining: {}", n); let results = &mut self.results; @@ -979,7 +992,7 @@ impl<'a, 'gctx> Downloads<'a, 'gctx> { self.set .multi .wait(&mut [], timeout) - .with_context(|| "failed to wait on curl `Multi`")?; + .context("failed to wait on curl `Multi`")?; } } } diff --git a/src/cargo/core/registry.rs b/src/cargo/core/registry.rs index 533b0f62392..37af3d7a130 100644 --- a/src/cargo/core/registry.rs +++ b/src/cargo/core/registry.rs @@ -113,11 +113,23 @@ pub struct PackageRegistry<'gctx> { yanked_whitelist: HashSet, source_config: SourceConfigMap<'gctx>, + /// Patches registered during calls to [`PackageRegistry::patch`]. + /// + /// These are available for `query` after calling [`PackageRegistry::lock_patches`], + /// which `lock`s them all to specific versions. patches: HashMap>, /// Whether patches are locked. That is, they are available to resolution. /// /// See [`PackageRegistry::lock_patches`] and [`PackageRegistry::patch`] for more. patches_locked: bool, + /// Patches available for each source. + /// + /// This is for determining whether a dependency entry from a lockfile + /// happened through `[patch]`, during calls to [`lock`] to rewrite + /// summaries to point directly at these patched entries. + /// + /// This is constructed during calls to [`PackageRegistry::patch`], + /// along with the `patches` field, though these entries never get locked. patches_available: HashMap>, } @@ -157,6 +169,15 @@ enum Kind { Normal, } +/// This tuple is an argument to [`PackageRegistry::patch`]. +/// +/// * The first element is the patch definition straight from the manifest. 
+/// * The second element is an optional variant where the patch has been locked. +/// It is the patch locked to a specific version found in Cargo.lock. +/// This will be `None` if `Cargo.lock` doesn't exist, +/// or the patch did not match any existing entries in `Cargo.lock`. +pub type PatchDependency<'a> = (&'a Dependency, Option); + /// Argument to [`PackageRegistry::patch`] which is information about a `[patch]` /// directive that we found in a lockfile, if present. pub struct LockedPatchDependency { @@ -173,8 +194,10 @@ pub struct LockedPatchDependency { } impl<'gctx> PackageRegistry<'gctx> { - pub fn new(gctx: &'gctx GlobalContext) -> CargoResult> { - let source_config = SourceConfigMap::new(gctx)?; + pub fn new_with_source_config( + gctx: &'gctx GlobalContext, + source_config: SourceConfigMap<'gctx>, + ) -> CargoResult> { Ok(PackageRegistry { gctx, sources: SourceMap::new(), @@ -214,7 +237,7 @@ impl<'gctx> PackageRegistry<'gctx> { } // If the previous source has the same precise version as we do, - // then we're done, otherwise we need to need to move forward + // then we're done, otherwise we need to move forward // updating this source. Some((previous, _)) => { if previous.has_same_precise_as(namespace) { @@ -303,14 +326,8 @@ impl<'gctx> PackageRegistry<'gctx> { /// The `deps` is an array of all the entries in the `[patch]` section of /// the manifest. /// - /// Here the `deps` will be resolved to a precise version and stored - /// internally for future calls to `query` below. `deps` should be a tuple - /// where the first element is the patch definition straight from the - /// manifest, and the second element is an optional variant where the - /// patch has been locked. This locked patch is the patch locked to - /// a specific version found in Cargo.lock. This will be `None` if - /// `Cargo.lock` doesn't exist, or the patch did not match any existing - /// entries in `Cargo.lock`. 
+ /// Here the `patch_deps` will be resolved to a precise version and stored + /// internally for future calls to `query` below. /// /// Note that the patch list specified here *will not* be available to /// [`Registry::query`] until [`PackageRegistry::lock_patches`] is called @@ -319,10 +336,11 @@ impl<'gctx> PackageRegistry<'gctx> { /// The return value is a `Vec` of patches that should *not* be locked. /// This happens when the patch is locked, but the patch has been updated /// so the locked value is no longer correct. + #[tracing::instrument(skip(self, patch_deps))] pub fn patch( &mut self, url: &Url, - deps: &[(&Dependency, Option)], + patch_deps: &[PatchDependency<'_>], ) -> CargoResult> { // NOTE: None of this code is aware of required features. If a patch // is missing a required feature, you end up with an "unused patch" @@ -333,9 +351,9 @@ impl<'gctx> PackageRegistry<'gctx> { // Return value of patches that shouldn't be locked. let mut unlock_patches = Vec::new(); - // First up we need to actually resolve each `deps` specification to - // precisely one summary. We're not using the `query` method below as it - // internally uses maps we're building up as part of this method + // First up we need to actually resolve each `patch_deps` specification + // to precisely one summary. We're not using the `query` method below + // as it internally uses maps we're building up as part of this method // (`patches_available` and `patches`). Instead we're going straight to // the source to load information from it. // @@ -343,12 +361,12 @@ impl<'gctx> PackageRegistry<'gctx> { // precisely one package, so that's why we're just creating a flat list // of summaries which should be the same length as `deps` above. 
- let mut deps_remaining: Vec<_> = deps.iter().collect(); + let mut patch_deps_remaining: Vec<_> = patch_deps.iter().collect(); let mut unlocked_summaries = Vec::new(); - while !deps_remaining.is_empty() { - let mut deps_pending = Vec::new(); - for dep_remaining in deps_remaining { - let (orig_patch, locked) = dep_remaining; + while !patch_deps_remaining.is_empty() { + let mut patch_deps_pending = Vec::new(); + for patch_dep_remaining in patch_deps_remaining { + let (orig_patch, locked) = patch_dep_remaining; // Use the locked patch if it exists, otherwise use the original. let dep = match locked { @@ -388,7 +406,7 @@ impl<'gctx> PackageRegistry<'gctx> { let summaries = match source.query_vec(dep, QueryKind::Exact)? { Poll::Ready(deps) => deps, Poll::Pending => { - deps_pending.push(dep_remaining); + patch_deps_pending.push(patch_dep_remaining); continue; } }; @@ -399,7 +417,7 @@ impl<'gctx> PackageRegistry<'gctx> { match summary_for_patch(orig_patch, &locked, summaries, source) { Poll::Ready(x) => x, Poll::Pending => { - deps_pending.push(dep_remaining); + patch_deps_pending.push(patch_dep_remaining); continue; } } @@ -426,13 +444,13 @@ impl<'gctx> PackageRegistry<'gctx> { patches must point to different sources", dep.package_name(), url - )) - .context(format!("failed to resolve patches for `{}`", url)); + ) + .context(format!("failed to resolve patches for `{}`", url))); } unlocked_summaries.push(summary); } - deps_remaining = deps_pending; + patch_deps_remaining = patch_deps_pending; self.block_until_ready()?; } @@ -450,25 +468,18 @@ impl<'gctx> PackageRegistry<'gctx> { } } - // Calculate a list of all patches available for this source which is - // then used later during calls to `lock` to rewrite summaries to point - // directly at these patched entries. - // - // Note that this is somewhat subtle where the list of `ids` for a - // canonical URL is extend with possibly two ids per summary. 
This is done - // to handle the transition from the v2->v3 lock file format where in - // v2 DefaultBranch was either DefaultBranch or Branch("master") for - // git dependencies. In this case if `summary.package_id()` is - // Branch("master") then alt_package_id will be DefaultBranch. This - // signifies that there's a patch available for either of those - // dependency directives if we see them in the dependency graph. - // - // This is a bit complicated and hopefully an edge case we can remove - // in the future, but for now it hopefully doesn't cause too much - // harm... + // Calculate a list of all patches available for this source. let mut ids = Vec::new(); - for (summary, (_, lock)) in unlocked_summaries.iter().zip(deps) { + for (summary, (_, lock)) in unlocked_summaries.iter().zip(patch_deps) { ids.push(summary.package_id()); + // This is subtle where the list of `ids` for a canonical URL is + // extended with possibly two ids per summary. This is done to handle + // the transition from the v2->v3 lock file format where in v2 + // DefaultBranch was either DefaultBranch or Branch("master") for + // git dependencies. In this case if `summary.package_id()` is + // Branch("master") then alt_package_id will be DefaultBranch. This + // signifies that there's a patch available for either of those + // dependency directives if we see them in the dependency graph. if let Some(lock) = lock { ids.extend(lock.alt_package_id); } @@ -636,139 +647,126 @@ impl<'gctx> Registry for PackageRegistry<'gctx> { f: &mut dyn FnMut(IndexSummary), ) -> Poll> { assert!(self.patches_locked); - let (override_summary, n, to_warn) = { - // Look for an override and get ready to query the real source. - let override_summary = ready!(self.query_overrides(dep))?; - - // Next up on our list of candidates is to check the `[patch]` - // section of the manifest. Here we look through all patches - // relevant to the source that `dep` points to, and then we match - // name/version. 
Note that we don't use `dep.matches(..)` because - // the patches, by definition, come from a different source. - // This means that `dep.matches(..)` will always return false, when - // what we really care about is the name/version match. - let mut patches = Vec::::new(); - if let Some(extra) = self.patches.get(dep.source_id().canonical_url()) { - patches.extend( - extra - .iter() - .filter(|s| dep.matches_ignoring_source(s.package_id())) - .cloned(), - ); - } + // Look for an override and get ready to query the real source. + let override_summary = ready!(self.query_overrides(dep))?; + + // Next up on our list of candidates is to check the `[patch]` section + // of the manifest. Here we look through all patches relevant to the + // source that `dep` points to, and then we match name/version. Note + // that we don't use `dep.matches(..)` because the patches, by definition, + // come from a different source. This means that `dep.matches(..)` will + // always return false, when what we really care about is the name/version match. + let mut patches = Vec::::new(); + if let Some(extra) = self.patches.get(dep.source_id().canonical_url()) { + patches.extend( + extra + .iter() + .filter(|s| dep.matches_ignoring_source(s.package_id())) + .cloned(), + ); + } - // A crucial feature of the `[patch]` feature is that we *don't* - // query the actual registry if we have a "locked" dependency. A - // locked dep basically just means a version constraint of `=a.b.c`, - // and because patches take priority over the actual source then if - // we have a candidate we're done. 
- if patches.len() == 1 && dep.is_locked() { - let patch = patches.remove(0); - match override_summary { - Some(summary) => (summary, 1, Some(IndexSummary::Candidate(patch))), - None => { - f(IndexSummary::Candidate(patch)); - return Poll::Ready(Ok(())); - } - } - } else { - if !patches.is_empty() { - debug!( - "found {} patches with an unlocked dep on `{}` at {} \ - with `{}`, \ - looking at sources", - patches.len(), - dep.package_name(), - dep.source_id(), - dep.version_req() - ); + // A crucial feature of the `[patch]` feature is that we don't query the + // actual registry if we have a "locked" dependency. A locked dep basically + // just means a version constraint of `=a.b.c`, and because patches take + // priority over the actual source then if we have a candidate we're done. + if patches.len() == 1 && dep.is_locked() { + let patch = patches.remove(0); + match override_summary { + Some(override_summary) => { + let override_summary = override_summary.into_summary(); + self.warn_bad_override(&override_summary, &patch)?; + f(IndexSummary::Candidate(self.lock(override_summary))); } + None => f(IndexSummary::Candidate(patch)), + } - // Ensure the requested source_id is loaded - self.ensure_loaded(dep.source_id(), Kind::Normal) - .with_context(|| { - format!( - "failed to load source for dependency `{}`", - dep.package_name() - ) - })?; + return Poll::Ready(Ok(())); + } - let source = self.sources.get_mut(dep.source_id()); - match (override_summary, source) { - (Some(_), None) => { - return Poll::Ready(Err(anyhow::anyhow!("override found but no real ones"))) - } - (None, None) => return Poll::Ready(Ok(())), + if !patches.is_empty() { + debug!( + "found {} patches with an unlocked dep on `{}` at {} \ + with `{}`, \ + looking at sources", + patches.len(), + dep.package_name(), + dep.source_id(), + dep.version_req() + ); + } - // If we don't have an override then we just ship - // everything upstairs after locking the summary - (None, Some(source)) => { - for patch 
in patches.iter() { - f(IndexSummary::Candidate(patch.clone())); - } + // Ensure the requested source_id is loaded + self.ensure_loaded(dep.source_id(), Kind::Normal) + .with_context(|| { + format!( + "failed to load source for dependency `{}`", + dep.package_name() + ) + })?; - // Our sources shouldn't ever come back to us with two - // summaries that have the same version. We could, - // however, have an `[patch]` section which is in use - // to override a version in the registry. This means - // that if our `summary` in this loop has the same - // version as something in `patches` that we've - // already selected, then we skip this `summary`. - let locked = &self.locked; - let all_patches = &self.patches_available; - let callback = &mut |summary: IndexSummary| { - for patch in patches.iter() { - let patch = patch.package_id().version(); - if summary.package_id().version() == patch { - return; - } - } - f(IndexSummary::Candidate(lock( - locked, - all_patches, - summary.into_summary(), - ))) - }; - return source.query(dep, kind, callback); - } + let source = self.sources.get_mut(dep.source_id()); + match (override_summary, source) { + (Some(_), None) => { + return Poll::Ready(Err(anyhow::anyhow!("override found but no real ones"))) + } + (None, None) => return Poll::Ready(Ok(())), - // If we have an override summary then we query the source - // to sanity check its results. We don't actually use any of - // the summaries it gives us though. 
- (Some(override_summary), Some(source)) => { - if !patches.is_empty() { - return Poll::Ready(Err(anyhow::anyhow!( - "found patches and a path override" - ))); - } - let mut n = 0; - let mut to_warn = None; - { - let callback = &mut |summary| { - n += 1; - to_warn = Some(summary); - }; - let pend = source.query(dep, kind, callback); - if pend.is_pending() { - return Poll::Pending; - } + // If we don't have an override then we just ship everything upstairs after locking the summary + (None, Some(source)) => { + for patch in patches.iter() { + f(IndexSummary::Candidate(patch.clone())); + } + + // Our sources shouldn't ever come back to us with two summaries + // that have the same version. We could, however, have an `[patch]` + // section which is in use to override a version in the registry. + // This means that if our `summary` in this loop has the same + // version as something in `patches` that we've already selected, + // then we skip this `summary`. + let locked = &self.locked; + let all_patches = &self.patches_available; + let callback = &mut |summary: IndexSummary| { + for patch in patches.iter() { + let patch = patch.package_id().version(); + if summary.package_id().version() == patch { + return; } - (override_summary, n, to_warn) } + let summary = summary.into_summary(); + f(IndexSummary::Candidate(lock(locked, all_patches, summary))) + }; + return source.query(dep, kind, callback); + } + + // If we have an override summary then we query the source to sanity check its results. + // We don't actually use any of the summaries it gives us though. 
+ (Some(override_summary), Some(source)) => { + if !patches.is_empty() { + return Poll::Ready(Err(anyhow::anyhow!("found patches and a path override"))); + } + let mut n = 0; + let mut to_warn = None; + let callback = &mut |summary| { + n += 1; + to_warn = Some(summary); + }; + let pend = source.query(dep, kind, callback); + if pend.is_pending() { + return Poll::Pending; + } + if n > 1 { + return Poll::Ready(Err(anyhow::anyhow!( + "found an override with a non-locked list" + ))); + } + let override_summary = override_summary.into_summary(); + if let Some(to_warn) = to_warn { + self.warn_bad_override(&override_summary, to_warn.as_summary())?; } + f(IndexSummary::Candidate(self.lock(override_summary))); } - }; - - if n > 1 { - return Poll::Ready(Err(anyhow::anyhow!( - "found an override with a non-locked list" - ))); - } else if let Some(summary) = to_warn { - self.warn_bad_override(override_summary.as_summary(), summary.as_summary())?; } - f(IndexSummary::Candidate( - self.lock(override_summary.into_summary()), - )); Poll::Ready(Ok(())) } @@ -787,6 +785,7 @@ impl<'gctx> Registry for PackageRegistry<'gctx> { } } + #[tracing::instrument(skip_all)] fn block_until_ready(&mut self) -> CargoResult<()> { if cfg!(debug_assertions) { // Force borrow to catch invalid borrows, regardless of which source is used and how it diff --git a/src/cargo/core/resolver/conflict_cache.rs b/src/cargo/core/resolver/conflict_cache.rs index a3c57fcbc38..f8543644561 100644 --- a/src/cargo/core/resolver/conflict_cache.rs +++ b/src/cargo/core/resolver/conflict_cache.rs @@ -194,11 +194,6 @@ impl ConflictCache { /// `dep` is known to be unresolvable if /// all the `PackageId` entries are activated. pub fn insert(&mut self, dep: &Dependency, con: &ConflictMap) { - if con.values().any(|c| c.is_public_dependency()) { - // TODO: needs more info for back jumping - // for now refuse to cache it. 
- return; - } self.con_from_dep .entry(dep.clone()) .or_insert_with(|| ConflictStoreTrie::Node(BTreeMap::new())) diff --git a/src/cargo/core/resolver/dep_cache.rs b/src/cargo/core/resolver/dep_cache.rs index f91bfa8c30f..e0964fcc07d 100644 --- a/src/cargo/core/resolver/dep_cache.rs +++ b/src/cargo/core/resolver/dep_cache.rs @@ -21,7 +21,7 @@ use crate::core::{ }; use crate::sources::source::QueryKind; use crate::util::errors::CargoResult; -use crate::util::interning::InternedString; +use crate::util::interning::{InternedString, INTERNED_DEFAULT}; use anyhow::Context as _; use std::collections::{BTreeSet, HashMap, HashSet}; @@ -348,7 +348,7 @@ fn build_requirements<'a, 'b: 'a>( let handle_default = |uses_default_features, reqs: &mut Requirements<'_>| { if uses_default_features && s.features().contains_key("default") { - if let Err(e) = reqs.require_feature(InternedString::new("default")) { + if let Err(e) = reqs.require_feature(INTERNED_DEFAULT) { return Err(e.into_activate_error(parent, s)); } } diff --git a/src/cargo/core/resolver/encode.rs b/src/cargo/core/resolver/encode.rs index 2b4c3008236..15ef325aceb 100644 --- a/src/cargo/core/resolver/encode.rs +++ b/src/cargo/core/resolver/encode.rs @@ -154,7 +154,7 @@ impl EncodableResolve { /// primary uses is to be used with `resolve_with_previous` to guide the /// resolver to create a complete Resolve. 
pub fn into_resolve(self, original: &str, ws: &Workspace<'_>) -> CargoResult { - let path_deps = build_path_deps(ws)?; + let path_deps: HashMap> = build_path_deps(ws)?; let mut checksums = HashMap::new(); let mut version = match self.version { @@ -202,7 +202,11 @@ impl EncodableResolve { if !all_pkgs.insert(enc_id.clone()) { anyhow::bail!("package `{}` is specified twice in the lockfile", pkg.name); } - let id = match pkg.source.as_deref().or_else(|| path_deps.get(&pkg.name)) { + let id = match pkg + .source + .as_deref() + .or_else(|| get_source_id(&path_deps, pkg)) + { // We failed to find a local package in the workspace. // It must have been removed and should be ignored. None => { @@ -364,7 +368,11 @@ impl EncodableResolve { let mut unused_patches = Vec::new(); for pkg in self.patch.unused { - let id = match pkg.source.as_deref().or_else(|| path_deps.get(&pkg.name)) { + let id = match pkg + .source + .as_deref() + .or_else(|| get_source_id(&path_deps, &pkg)) + { Some(&src) => PackageId::try_new(&pkg.name, &pkg.version, src)?, None => continue, }; @@ -395,7 +403,7 @@ impl EncodableResolve { version = ResolveVersion::V2; } - Ok(Resolve::new( + return Ok(Resolve::new( g, replacements, HashMap::new(), @@ -404,11 +412,35 @@ impl EncodableResolve { unused_patches, version, HashMap::new(), - )) + )); + + fn get_source_id<'a>( + path_deps: &'a HashMap>, + pkg: &'a EncodableDependency, + ) -> Option<&'a SourceId> { + path_deps.iter().find_map(|(name, version_source)| { + if name != &pkg.name || version_source.len() == 0 { + return None; + } + if version_source.len() == 1 { + return Some(version_source.values().next().unwrap()); + } + // If there are multiple candidates for the same name, it needs to be determined by combining versions (See #13405). 
+ if let Ok(pkg_version) = pkg.version.parse::() { + if let Some(source_id) = version_source.get(&pkg_version) { + return Some(source_id); + } + } + + None + }) + } } } -fn build_path_deps(ws: &Workspace<'_>) -> CargoResult> { +fn build_path_deps( + ws: &Workspace<'_>, +) -> CargoResult>> { // If a crate is **not** a path source, then we're probably in a situation // such as `cargo install` with a lock file from a remote dependency. In // that case we don't need to fixup any path dependencies (as they're not @@ -418,13 +450,15 @@ fn build_path_deps(ws: &Workspace<'_>) -> CargoResult> .filter(|p| p.package_id().source_id().is_path()) .collect::>(); - let mut ret = HashMap::new(); + let mut ret: HashMap> = HashMap::new(); let mut visited = HashSet::new(); for member in members.iter() { - ret.insert( - member.package_id().name().to_string(), - member.package_id().source_id(), - ); + ret.entry(member.package_id().name().to_string()) + .or_insert_with(HashMap::new) + .insert( + member.package_id().version().clone(), + member.package_id().source_id(), + ); visited.insert(member.package_id().source_id()); } for member in members.iter() { @@ -444,7 +478,7 @@ fn build_path_deps(ws: &Workspace<'_>) -> CargoResult> fn build_pkg( pkg: &Package, ws: &Workspace<'_>, - ret: &mut HashMap, + ret: &mut HashMap>, visited: &mut HashSet, ) { for dep in pkg.dependencies() { @@ -455,7 +489,7 @@ fn build_path_deps(ws: &Workspace<'_>) -> CargoResult> fn build_dep( dep: &Dependency, ws: &Workspace<'_>, - ret: &mut HashMap, + ret: &mut HashMap>, visited: &mut HashSet, ) { let id = dep.source_id(); @@ -467,7 +501,12 @@ fn build_path_deps(ws: &Workspace<'_>) -> CargoResult> Err(_) => return, }; let Ok(pkg) = ws.load(&path) else { return }; - ret.insert(pkg.name().to_string(), pkg.package_id().source_id()); + ret.entry(pkg.package_id().name().to_string()) + .or_insert_with(HashMap::new) + .insert( + pkg.package_id().version().clone(), + pkg.package_id().source_id(), + ); 
visited.insert(pkg.package_id().source_id()); build_pkg(&pkg, ws, ret, visited); } @@ -494,8 +533,6 @@ pub struct EncodableDependency { /// The serialization for `SourceId` doesn't do URL encode for parameters. /// In contrast, this type is aware of that whenever [`ResolveVersion`] allows /// us to do so (v4 or later). -/// -/// [`EncodableResolve`] turns into a ` #[derive(Deserialize, Debug, PartialOrd, Ord, Clone)] #[serde(transparent)] pub struct EncodableSourceId { diff --git a/src/cargo/core/resolver/errors.rs b/src/cargo/core/resolver/errors.rs index 5c5cf9dc5f4..7fbf4e22c0e 100644 --- a/src/cargo/core/resolver/errors.rs +++ b/src/cargo/core/resolver/errors.rs @@ -193,14 +193,6 @@ pub(super) fn activation_error( ); // p == parent so the full path is redundant. } - ConflictReason::PublicDependency(pkg_id) => { - // TODO: This needs to be implemented. - unimplemented!("pub dep {:?}", pkg_id); - } - ConflictReason::PubliclyExports(pkg_id) => { - // TODO: This needs to be implemented. 
- unimplemented!("pub exp {:?}", pkg_id); - } } } diff --git a/src/cargo/core/resolver/features.rs b/src/cargo/core/resolver/features.rs index fdb49c0bf9f..29d90957ae5 100644 --- a/src/cargo/core/resolver/features.rs +++ b/src/cargo/core/resolver/features.rs @@ -43,7 +43,7 @@ use crate::core::dependency::{ArtifactTarget, DepKind, Dependency}; use crate::core::resolver::types::FeaturesSet; use crate::core::resolver::{Resolve, ResolveBehavior}; use crate::core::{FeatureValue, PackageId, PackageIdSpec, PackageSet, Workspace}; -use crate::util::interning::InternedString; +use crate::util::interning::{InternedString, INTERNED_DEFAULT}; use crate::util::CargoResult; use anyhow::{bail, Context}; use itertools::Itertools; @@ -319,8 +319,30 @@ impl ResolvedFeatures { pkg_id: PackageId, features_for: FeaturesFor, ) -> Vec { - self.activated_features_int(pkg_id, features_for) - .expect("activated_features for invalid package") + if let Some(res) = self.activated_features_unverified(pkg_id, features_for) { + res + } else { + panic!( + "did not find features for ({pkg_id:?}, {features_for:?}) within activated_features:\n{:#?}", + self.activated_features.keys() + ) + } + } + + /// Variant of `activated_features` that returns `None` if this is + /// not a valid pkg_id/is_build combination. Used in places which do + /// not know which packages are activated (like `cargo clean`). + pub fn activated_features_unverified( + &self, + pkg_id: PackageId, + features_for: FeaturesFor, + ) -> Option> { + let fk = features_for.apply_opts(&self.opts); + if let Some(fs) = self.activated_features.get(&(pkg_id, fk)) { + Some(fs.iter().cloned().collect()) + } else { + None + } } /// Returns if the given dependency should be included. @@ -340,30 +362,6 @@ impl ResolvedFeatures { .unwrap_or(false) } - /// Variant of `activated_features` that returns `None` if this is - /// not a valid pkg_id/is_build combination. 
Used in places which do - /// not know which packages are activated (like `cargo clean`). - pub fn activated_features_unverified( - &self, - pkg_id: PackageId, - features_for: FeaturesFor, - ) -> Option> { - self.activated_features_int(pkg_id, features_for).ok() - } - - fn activated_features_int( - &self, - pkg_id: PackageId, - features_for: FeaturesFor, - ) -> CargoResult> { - let fk = features_for.apply_opts(&self.opts); - if let Some(fs) = self.activated_features.get(&(pkg_id, fk)) { - Ok(fs.iter().cloned().collect()) - } else { - bail!("features did not find {:?} {:?}", pkg_id, fk) - } - } - /// Compares the result against the original resolver behavior. /// /// Used by `cargo fix --edition` to display any differences. @@ -426,10 +424,12 @@ pub struct FeatureResolver<'a, 'gctx> { /// If this is `true`, then a non-default `feature_key` needs to be tracked while /// traversing the graph. /// - /// This is only here to avoid calling `is_proc_macro` when all feature - /// options are disabled (because `is_proc_macro` can trigger downloads). - /// This has to be separate from `FeatureOpts.decouple_host_deps` because + /// This is only here to avoid calling [`has_any_proc_macro`] when all feature + /// options are disabled (because [`has_any_proc_macro`] can trigger downloads). + /// This has to be separate from [`FeatureOpts::decouple_host_deps`] because /// `for_host` tracking is also needed for `itarget` to work properly. + /// + /// [`has_any_proc_macro`]: FeatureResolver::has_any_proc_macro track_for_host: bool, /// `dep_name?/feat_name` features that will be activated if `dep_name` is /// ever activated. 
@@ -490,7 +490,7 @@ impl<'a, 'gctx> FeatureResolver<'a, 'gctx> { let member_features = self.ws.members_with_features(specs, cli_features)?; for (member, cli_features) in &member_features { let fvs = self.fvs_from_requested(member.package_id(), cli_features); - let fk = if self.track_for_host && self.is_proc_macro(member.package_id()) { + let fk = if self.track_for_host && self.has_any_proc_macro(member.package_id()) { // Also activate for normal dependencies. This is needed if the // proc-macro includes other targets (like binaries or tests), // or running in `cargo test`. Note that in a workspace, if @@ -743,9 +743,8 @@ impl<'a, 'gctx> FeatureResolver<'a, 'gctx> { .iter() .map(|f| FeatureValue::new(*f)) .collect(); - let default = InternedString::new("default"); - if dep.uses_default_features() && feature_map.contains_key(&default) { - result.push(FeatureValue::Feature(default)); + if dep.uses_default_features() && feature_map.contains_key(&INTERNED_DEFAULT) { + result.push(FeatureValue::Feature(INTERNED_DEFAULT)); } result } @@ -760,9 +759,8 @@ impl<'a, 'gctx> FeatureResolver<'a, 'gctx> { let feature_map = summary.features(); let mut result: Vec = cli_features.features.iter().cloned().collect(); - let default = InternedString::new("default"); - if cli_features.uses_default_features && feature_map.contains_key(&default) { - result.push(FeatureValue::Feature(default)); + if cli_features.uses_default_features && feature_map.contains_key(&INTERNED_DEFAULT) { + result.push(FeatureValue::Feature(INTERNED_DEFAULT)); } if cli_features.all_features { @@ -852,7 +850,7 @@ impl<'a, 'gctx> FeatureResolver<'a, 'gctx> { // for various targets which are either specified in the manifest // or on the cargo command-line. 
let lib_fk = if fk == FeaturesFor::default() { - (self.track_for_host && (dep.is_build() || self.is_proc_macro(dep_id))) + (self.track_for_host && (dep.is_build() || self.has_proc_macro_lib(dep_id))) .then(|| FeaturesFor::HostDep) .unwrap_or_default() } else { @@ -957,10 +955,24 @@ impl<'a, 'gctx> FeatureResolver<'a, 'gctx> { } } - fn is_proc_macro(&self, package_id: PackageId) -> bool { + /// Whether the given package has any proc macro target, including proc-macro examples. + fn has_any_proc_macro(&self, package_id: PackageId) -> bool { self.package_set .get_one(package_id) .expect("packages downloaded") .proc_macro() } + + /// Whether the given package is a proc macro lib target. + /// + /// This is useful for checking if a dependency is a proc macro, + /// as it is not possible to depend on a non-lib target as a proc-macro. + fn has_proc_macro_lib(&self, package_id: PackageId) -> bool { + self.package_set + .get_one(package_id) + .expect("packages downloaded") + .library() + .map(|lib| lib.proc_macro()) + .unwrap_or_default() + } } diff --git a/src/cargo/core/resolver/mod.rs b/src/cargo/core/resolver/mod.rs index 20072331354..010c03bb85b 100644 --- a/src/cargo/core/resolver/mod.rs +++ b/src/cargo/core/resolver/mod.rs @@ -820,10 +820,6 @@ fn generalize_conflicting( let backtrack_critical_reason: ConflictReason = conflicting_activations[&backtrack_critical_id].clone(); - if backtrack_critical_reason.is_public_dependency() { - return None; - } - if cx .parents .is_path_from_to(&parent.package_id(), &backtrack_critical_id) @@ -1004,69 +1000,57 @@ fn find_candidate( } fn check_cycles(resolve: &Resolve) -> CargoResult<()> { - // Create a simple graph representation alternative of `resolve` which has - // only the edges we care about. Note that `BTree*` is used to produce - // deterministic error messages here. 
Also note that the main reason for - // this copy of the resolve graph is to avoid edges between a crate and its - // dev-dependency since that doesn't count for cycles. - let mut graph = BTreeMap::new(); - for id in resolve.iter() { - let map = graph.entry(id).or_insert_with(BTreeMap::new); - for (dep_id, listings) in resolve.deps_not_replaced(id) { - let transitive_dep = listings.iter().find(|d| d.is_transitive()); - - if let Some(transitive_dep) = transitive_dep.cloned() { - map.insert(dep_id, transitive_dep.clone()); - resolve - .replacement(dep_id) - .map(|p| map.insert(p, transitive_dep)); - } - } - } - - // After we have the `graph` that we care about, perform a simple cycle - // check by visiting all nodes. We visit each node at most once and we keep + // Perform a simple cycle check by visiting all nodes. + // We visit each node at most once and we keep // track of the path through the graph as we walk it. If we walk onto the // same node twice that's a cycle. - let mut checked = HashSet::new(); - let mut path = Vec::new(); - let mut visited = HashSet::new(); - for pkg in graph.keys() { - if !checked.contains(pkg) { - visit(&graph, *pkg, &mut visited, &mut path, &mut checked)? + let mut checked = HashSet::with_capacity(resolve.len()); + let mut path = Vec::with_capacity(4); + let mut visited = HashSet::with_capacity(4); + for pkg in resolve.iter() { + if !checked.contains(&pkg) { + visit(&resolve, pkg, &mut visited, &mut path, &mut checked)? } } return Ok(()); fn visit( - graph: &BTreeMap>, + resolve: &Resolve, id: PackageId, visited: &mut HashSet, path: &mut Vec, checked: &mut HashSet, ) -> CargoResult<()> { - path.push(id); if !visited.insert(id) { - let iter = path.iter().rev().skip(1).scan(id, |child, parent| { - let dep = graph.get(parent).and_then(|adjacent| adjacent.get(child)); + // We found a cycle and need to construct an error. Performance is no longer top priority. 
+ let iter = path.iter().rev().scan(id, |child, parent| { + let dep = resolve.transitive_deps_not_replaced(*parent).find_map( + |(dep_id, transitive_dep)| { + (*child == dep_id || Some(*child) == resolve.replacement(dep_id)) + .then_some(transitive_dep) + }, + ); *child = *parent; Some((parent, dep)) }); let iter = std::iter::once((&id, None)).chain(iter); + let describe_path = errors::describe_path(iter); anyhow::bail!( - "cyclic package dependency: package `{}` depends on itself. Cycle:\n{}", - id, - errors::describe_path(iter), + "cyclic package dependency: package `{id}` depends on itself. Cycle:\n{describe_path}" ); } if checked.insert(id) { - for dep in graph[&id].keys() { - visit(graph, *dep, visited, path, checked)?; + path.push(id); + for (dep_id, _transitive_dep) in resolve.transitive_deps_not_replaced(id) { + visit(resolve, dep_id, visited, path, checked)?; + if let Some(replace_id) = resolve.replacement(dep_id) { + visit(resolve, replace_id, visited, path, checked)?; + } } + path.pop(); } - path.pop(); visited.remove(&id); Ok(()) } diff --git a/src/cargo/core/resolver/resolve.rs b/src/cargo/core/resolver/resolve.rs index d77ff9f3590..ca3ee43a2ba 100644 --- a/src/cargo/core/resolver/resolve.rs +++ b/src/cargo/core/resolver/resolve.rs @@ -84,13 +84,14 @@ pub enum ResolveVersion { /// branch specifiers. /// /// * Introduced in 2020 in version 1.47. - /// * New lockfiles use V3 by default starting in 1.53. + /// * New lockfiles use V3 by default from in 1.53 to 1.82. V3, /// SourceId URL serialization is aware of URL encoding. For example, /// `?branch=foo bar` is now encoded as `?branch=foo+bar` and can be decoded /// back and forth correctly. /// /// * Introduced in 2024 in version 1.78. + /// * New lockfiles use V4 by default starting in 1.83. V4, /// Unstable. Will collect a certain amount of changes and then go. 
/// @@ -107,7 +108,7 @@ impl ResolveVersion { /// Update this and the description of enum variants of [`ResolveVersion`] /// when we're changing the default lockfile version. fn default() -> ResolveVersion { - ResolveVersion::V3 + ResolveVersion::V4 } /// The maximum version of lockfile made into the stable channel. @@ -125,28 +126,23 @@ impl ResolveVersion { return ResolveVersion::default(); }; - let rust_1_41 = PartialVersion { - major: 1, - minor: Some(41), - patch: None, - pre: None, - build: None, - } - .try_into() - .expect("PartialVersion 1.41"); - let rust_1_53 = PartialVersion { - major: 1, - minor: Some(53), - patch: None, - pre: None, - build: None, - } - .try_into() - .expect("PartialVersion 1.53"); + let rust = |major, minor| -> RustVersion { + PartialVersion { + major, + minor: Some(minor), + patch: None, + pre: None, + build: None, + } + .try_into() + .unwrap() + }; - if rust_version >= &rust_1_53 { + if rust_version >= &rust(1, 83) { + ResolveVersion::V4 + } else if rust_version >= &rust(1, 53) { ResolveVersion::V3 - } else if rust_version >= &rust_1_41 { + } else if rust_version >= &rust(1, 41) { ResolveVersion::V2 } else { ResolveVersion::V1 @@ -205,7 +201,7 @@ impl Resolve { self.graph.path_to_top(pkg) } - pub fn register_used_patches(&mut self, patches: &[Summary]) { + pub fn register_used_patches<'a>(&mut self, patches: impl Iterator) { for summary in patches { if !self.graph.contains(&summary.package_id()) { self.unused_patches.push(summary.package_id()) @@ -324,6 +320,10 @@ unable to verify that `{0}` is the same as when the lockfile was generated self.graph.iter().cloned() } + pub fn len(&self) -> usize { + self.graph.len() + } + pub fn deps(&self, pkg: PackageId) -> impl Iterator)> { self.deps_not_replaced(pkg) .map(move |(id, deps)| (self.replacement(id).unwrap_or(id), deps)) @@ -336,6 +336,19 @@ unable to verify that `{0}` is the same as when the lockfile was generated self.graph.edges(&pkg).map(|(id, deps)| (*id, deps)) } + // Only edges 
that are transitive, filtering out edges between a crate and its dev-dependency + // since that doesn't count for cycles. + pub fn transitive_deps_not_replaced( + &self, + pkg: PackageId, + ) -> impl Iterator { + self.graph.edges(&pkg).filter_map(|(id, deps)| { + deps.iter() + .find(|d| d.is_transitive()) + .map(|transitive_dep| (*id, transitive_dep)) + }) + } + pub fn replacement(&self, pkg: PackageId) -> Option { self.replacements.get(&pkg).cloned() } diff --git a/src/cargo/core/resolver/types.rs b/src/cargo/core/resolver/types.rs index 67fac19aa59..36def5a964d 100644 --- a/src/cargo/core/resolver/types.rs +++ b/src/cargo/core/resolver/types.rs @@ -300,12 +300,6 @@ pub enum ConflictReason { /// A dependency listed a feature for an optional dependency, but that /// optional dependency is "hidden" using namespaced `dep:` syntax. NonImplicitDependencyAsFeature(InternedString), - - // TODO: needs more info for `activation_error` - // TODO: needs more info for `find_candidate` - /// pub dep error - PublicDependency(PackageId), - PubliclyExports(PackageId), } impl ConflictReason { @@ -320,13 +314,6 @@ impl ConflictReason { pub fn is_required_dependency_as_features(&self) -> bool { matches!(self, ConflictReason::RequiredDependencyAsFeature(_)) } - - pub fn is_public_dependency(&self) -> bool { - matches!( - self, - ConflictReason::PublicDependency(_) | ConflictReason::PubliclyExports(_) - ) - } } /// A list of packages that have gotten in the way of resolving a dependency. 
diff --git a/src/cargo/core/resolver/version_prefs.rs b/src/cargo/core/resolver/version_prefs.rs index 5e6cc230ffb..61f17404936 100644 --- a/src/cargo/core/resolver/version_prefs.rs +++ b/src/cargo/core/resolver/version_prefs.rs @@ -21,7 +21,7 @@ pub struct VersionPreferences { try_to_use: HashSet, prefer_patch_deps: HashMap>, version_ordering: VersionOrdering, - max_rust_version: Option, + rust_versions: Vec, } #[derive(Copy, Clone, Default, PartialEq, Eq, Hash, Debug)] @@ -49,8 +49,8 @@ impl VersionPreferences { self.version_ordering = ordering; } - pub fn max_rust_version(&mut self, ver: Option) { - self.max_rust_version = ver; + pub fn rust_versions(&mut self, vers: Vec) { + self.rust_versions = vers; } /// Sort (and filter) the given vector of summaries in-place @@ -59,7 +59,7 @@ impl VersionPreferences { /// /// Sort order: /// 1. Preferred packages - /// 2. [`VersionPreferences::max_rust_version`] + /// 2. Most compatible [`VersionPreferences::rust_versions`] /// 3. `first_version`, falling back to [`VersionPreferences::version_ordering`] when `None` /// /// Filtering: @@ -85,37 +85,11 @@ impl VersionPreferences { return previous_cmp; } - if let Some(max_rust_version) = &self.max_rust_version { - match (a.rust_version(), b.rust_version()) { - // Fallback - (None, None) => {} - (Some(a), Some(b)) if a == b => {} - // Primary comparison - (Some(a), Some(b)) => { - let a_is_compat = a.is_compatible_with(max_rust_version); - let b_is_compat = b.is_compatible_with(max_rust_version); - match (a_is_compat, b_is_compat) { - (true, true) => {} // fallback - (false, false) => {} // fallback - (true, false) => return Ordering::Less, - (false, true) => return Ordering::Greater, - } - } - // Prioritize `None` over incompatible - (None, Some(b)) => { - if b.is_compatible_with(max_rust_version) { - return Ordering::Greater; - } else { - return Ordering::Less; - } - } - (Some(a), None) => { - if a.is_compatible_with(max_rust_version) { - return Ordering::Less; - } else { - 
return Ordering::Greater; - } - } + if !self.rust_versions.is_empty() { + let a_compat_count = self.msrv_compat_count(a); + let b_compat_count = self.msrv_compat_count(b); + if b_compat_count != a_compat_count { + return b_compat_count.cmp(&a_compat_count); } } @@ -129,6 +103,17 @@ impl VersionPreferences { let _ = summaries.split_off(1); } } + + fn msrv_compat_count(&self, summary: &Summary) -> usize { + let Some(rust_version) = summary.rust_version() else { + return self.rust_versions.len(); + }; + + self.rust_versions + .iter() + .filter(|max| rust_version.is_compatible_with(max)) + .count() + } } #[cfg(test)] @@ -253,9 +238,41 @@ mod test { } #[test] - fn test_max_rust_version() { + fn test_single_rust_version() { + let mut vp = VersionPreferences::default(); + vp.rust_versions(vec!["1.50".parse().unwrap()]); + + let mut summaries = vec![ + summ("foo", "1.2.4", None), + summ("foo", "1.2.3", Some("1.60")), + summ("foo", "1.2.2", None), + summ("foo", "1.2.1", Some("1.50")), + summ("foo", "1.2.0", None), + summ("foo", "1.1.0", Some("1.40")), + summ("foo", "1.0.9", None), + ]; + + vp.version_ordering(VersionOrdering::MaximumVersionsFirst); + vp.sort_summaries(&mut summaries, None); + assert_eq!( + describe(&summaries), + "foo/1.2.4, foo/1.2.2, foo/1.2.1, foo/1.2.0, foo/1.1.0, foo/1.0.9, foo/1.2.3" + .to_string() + ); + + vp.version_ordering(VersionOrdering::MinimumVersionsFirst); + vp.sort_summaries(&mut summaries, None); + assert_eq!( + describe(&summaries), + "foo/1.0.9, foo/1.1.0, foo/1.2.0, foo/1.2.1, foo/1.2.2, foo/1.2.4, foo/1.2.3" + .to_string() + ); + } + + #[test] + fn test_multiple_rust_versions() { let mut vp = VersionPreferences::default(); - vp.max_rust_version(Some("1.50".parse().unwrap())); + vp.rust_versions(vec!["1.45".parse().unwrap(), "1.55".parse().unwrap()]); let mut summaries = vec![ summ("foo", "1.2.4", None), @@ -271,7 +288,7 @@ mod test { vp.sort_summaries(&mut summaries, None); assert_eq!( describe(&summaries), - "foo/1.2.1, foo/1.1.0, 
foo/1.2.4, foo/1.2.2, foo/1.2.0, foo/1.0.9, foo/1.2.3" + "foo/1.2.4, foo/1.2.2, foo/1.2.0, foo/1.1.0, foo/1.0.9, foo/1.2.1, foo/1.2.3" .to_string() ); @@ -279,7 +296,7 @@ mod test { vp.sort_summaries(&mut summaries, None); assert_eq!( describe(&summaries), - "foo/1.1.0, foo/1.2.1, foo/1.0.9, foo/1.2.0, foo/1.2.2, foo/1.2.4, foo/1.2.3" + "foo/1.0.9, foo/1.1.0, foo/1.2.0, foo/1.2.2, foo/1.2.4, foo/1.2.1, foo/1.2.3" .to_string() ); } diff --git a/src/cargo/core/shell.rs b/src/cargo/core/shell.rs index ff4bb3d2d9e..174986ee446 100644 --- a/src/cargo/core/shell.rs +++ b/src/cargo/core/shell.rs @@ -2,6 +2,7 @@ use std::fmt; use std::io::prelude::*; use std::io::IsTerminal; +use annotate_snippets::{Message, Renderer}; use anstream::AutoStream; use anstyle::Style; @@ -391,6 +392,19 @@ impl Shell { drop(writeln!(self.out(), "{}", encoded)); Ok(()) } + + /// Prints the passed in [Message] to stderr + pub fn print_message(&mut self, message: Message<'_>) -> std::io::Result<()> { + let term_width = self + .err_width() + .diagnostic_terminal_width() + .unwrap_or(annotate_snippets::renderer::DEFAULT_TERM_WIDTH); + writeln!( + self.err(), + "{}", + Renderer::styled().term_width(term_width).render(message) + ) + } } impl Default for Shell { @@ -648,7 +662,7 @@ mod imp { ptr::null_mut(), OPEN_EXISTING, 0, - 0, + std::ptr::null_mut(), ); if h == INVALID_HANDLE_VALUE { return TtyWidth::NoTty; diff --git a/src/cargo/core/source_id.rs b/src/cargo/core/source_id.rs index d03a0a5769c..53eb7f16f7e 100644 --- a/src/cargo/core/source_id.rs +++ b/src/cargo/core/source_id.rs @@ -468,34 +468,33 @@ impl SourceId { /// Creates a new `SourceId` from this source with the given `precise`. 
pub fn with_git_precise(self, fragment: Option) -> SourceId { - SourceId::wrap(SourceIdInner { - precise: fragment.map(|f| Precise::GitUrlFragment(f)), - ..(*self.inner).clone() - }) + self.with_precise(&fragment.map(|f| Precise::GitUrlFragment(f))) } /// Creates a new `SourceId` from this source without a `precise`. pub fn without_precise(self) -> SourceId { - SourceId::wrap(SourceIdInner { - precise: None, - ..(*self.inner).clone() - }) + self.with_precise(&None) } /// Creates a new `SourceId` from this source without a `precise`. pub fn with_locked_precise(self) -> SourceId { - SourceId::wrap(SourceIdInner { - precise: Some(Precise::Locked), - ..(*self.inner).clone() - }) + self.with_precise(&Some(Precise::Locked)) } /// Creates a new `SourceId` from this source with the `precise` from some other `SourceId`. pub fn with_precise_from(self, v: Self) -> SourceId { - SourceId::wrap(SourceIdInner { - precise: v.inner.precise.clone(), - ..(*self.inner).clone() - }) + self.with_precise(&v.inner.precise) + } + + fn with_precise(self, precise: &Option) -> SourceId { + if &self.inner.precise == precise { + self + } else { + SourceId::wrap(SourceIdInner { + precise: precise.clone(), + ..(*self.inner).clone() + }) + } } /// When updating a lock file on a version using `cargo update --precise` @@ -645,10 +644,7 @@ impl fmt::Display for SourceId { // Don't replace the URL display for git references, // because those are kind of expected to be URLs. write!(f, "{}", self.inner.url)?; - // TODO(-Znext-lockfile-bump): set it to true when the default is - // lockfile v4, because we want Source ID serialization to be - // consistent with lockfile. 
- if let Some(pretty) = reference.pretty_ref(false) { + if let Some(pretty) = reference.pretty_ref(true) { write!(f, "?{}", pretty)?; } diff --git a/src/cargo/core/summary.rs b/src/cargo/core/summary.rs index ec0197cf40d..1b07bcb9b4c 100644 --- a/src/cargo/core/summary.rs +++ b/src/cargo/core/summary.rs @@ -9,7 +9,7 @@ use std::collections::{BTreeMap, HashMap, HashSet}; use std::fmt; use std::hash::{Hash, Hasher}; use std::mem; -use std::rc::Rc; +use std::sync::Arc; /// Subset of a `Manifest`. Contains only the most important information about /// a package. @@ -17,19 +17,48 @@ use std::rc::Rc; /// Summaries are cloned, and should not be mutated after creation #[derive(Debug, Clone)] pub struct Summary { - inner: Rc, + inner: Arc, } #[derive(Debug, Clone)] struct Inner { package_id: PackageId, dependencies: Vec, - features: Rc, + features: Arc, checksum: Option, links: Option, rust_version: Option, } +/// Indicates the dependency inferred from the `dep` syntax that should exist, +/// but missing on the resolved dependencies tables. +#[derive(Debug)] +pub struct MissingDependencyError { + pub dep_name: InternedString, + pub feature: InternedString, + pub feature_value: FeatureValue, + /// Indicates the dependency inferred from the `dep?` syntax that is weak optional + pub weak_optional: bool, +} + +impl std::error::Error for MissingDependencyError {} + +impl fmt::Display for MissingDependencyError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let Self { + dep_name, + feature, + feature_value: fv, + .. 
+ } = self; + + write!( + f, + "feature `{feature}` includes `{fv}`, but `{dep_name}` is not a dependency", + ) + } +} + impl Summary { #[tracing::instrument(skip_all)] pub fn new( @@ -53,10 +82,10 @@ impl Summary { } let feature_map = build_feature_map(features, &dependencies)?; Ok(Summary { - inner: Rc::new(Inner { + inner: Arc::new(Inner { package_id: pkg_id, dependencies, - features: Rc::new(feature_map), + features: Arc::new(feature_map), checksum: None, links: links.map(|l| l.into()), rust_version, @@ -95,23 +124,33 @@ impl Summary { } pub fn override_id(mut self, id: PackageId) -> Summary { - Rc::make_mut(&mut self.inner).package_id = id; + Arc::make_mut(&mut self.inner).package_id = id; self } pub fn set_checksum(&mut self, cksum: String) { - Rc::make_mut(&mut self.inner).checksum = Some(cksum); + Arc::make_mut(&mut self.inner).checksum = Some(cksum); } - pub fn map_dependencies(mut self, f: F) -> Summary + pub fn map_dependencies(self, mut f: F) -> Summary where F: FnMut(Dependency) -> Dependency, + { + self.try_map_dependencies(|dep| Ok(f(dep))).unwrap() + } + + pub fn try_map_dependencies(mut self, f: F) -> CargoResult + where + F: FnMut(Dependency) -> CargoResult, { { - let slot = &mut Rc::make_mut(&mut self.inner).dependencies; - *slot = mem::take(slot).into_iter().map(f).collect(); + let slot = &mut Arc::make_mut(&mut self.inner).dependencies; + *slot = mem::take(slot) + .into_iter() + .map(f) + .collect::>()?; } - self + Ok(self) } pub fn map_source(self, to_replace: SourceId, replace_with: SourceId) -> Summary { @@ -139,6 +178,12 @@ impl Hash for Summary { } } +// A check that only compiles if Summary is Sync +const _: fn() = || { + fn is_sync() {} + is_sync::(); +}; + /// Checks features for errors, bailing out a CargoResult:Err if invalid, /// and creates FeatureValues for each feature. 
fn build_feature_map( @@ -146,13 +191,12 @@ fn build_feature_map( dependencies: &[Dependency], ) -> CargoResult { use self::FeatureValue::*; - let mut dep_map = HashMap::new(); + // A map of dependency names to whether there are any that are optional. + let mut dep_map: HashMap = HashMap::new(); for dep in dependencies.iter() { - dep_map - .entry(dep.name_in_toml()) - .or_insert_with(Vec::new) - .push(dep); + *dep_map.entry(dep.name_in_toml()).or_insert(false) |= dep.is_optional(); } + let dep_map = dep_map; // We are done mutating this variable let mut map: FeatureMap = features .iter() @@ -170,91 +214,62 @@ fn build_feature_map( let explicitly_listed: HashSet<_> = map .values() .flatten() - .filter_map(|fv| match fv { - Dep { dep_name } => Some(*dep_name), - _ => None, - }) + .filter_map(|fv| fv.explicit_dep_name()) .collect(); + for dep in dependencies { if !dep.is_optional() { continue; } - let dep_name_in_toml = dep.name_in_toml(); - if features.contains_key(&dep_name_in_toml) || explicitly_listed.contains(&dep_name_in_toml) - { + let dep_name = dep.name_in_toml(); + if features.contains_key(&dep_name) || explicitly_listed.contains(&dep_name) { continue; } - let fv = Dep { - dep_name: dep_name_in_toml, - }; - map.insert(dep_name_in_toml, vec![fv]); + map.insert(dep_name, vec![Dep { dep_name }]); } + let map = map; // We are done mutating this variable // Validate features are listed properly. for (feature, fvs) in &map { FeatureName::new(feature)?; for fv in fvs { // Find data for the referenced dependency... - let dep_data = { - match fv { - Feature(dep_name) | Dep { dep_name, .. } | DepFeature { dep_name, .. 
} => { - dep_map.get(dep_name) - } - } - }; - let is_optional_dep = dep_data - .iter() - .flat_map(|d| d.iter()) - .any(|d| d.is_optional()); + let dep_data = dep_map.get(&fv.feature_or_dep_name()); let is_any_dep = dep_data.is_some(); + let is_optional_dep = dep_data.is_some_and(|&o| o); match fv { Feature(f) => { if !features.contains_key(f) { if !is_any_dep { bail!( - "feature `{}` includes `{}` which is neither a dependency \ - nor another feature", - feature, - fv - ); + "feature `{feature}` includes `{fv}` which is neither a dependency \ + nor another feature" + ); } if is_optional_dep { if !map.contains_key(f) { bail!( - "feature `{}` includes `{}`, but `{}` is an \ + "feature `{feature}` includes `{fv}`, but `{f}` is an \ optional dependency without an implicit feature\n\ - Use `dep:{}` to enable the dependency.", - feature, - fv, - f, - f + Use `dep:{f}` to enable the dependency." ); } } else { - bail!("feature `{}` includes `{}`, but `{}` is not an optional dependency\n\ + bail!("feature `{feature}` includes `{fv}`, but `{f}` is not an optional dependency\n\ A non-optional dependency of the same name is defined; \ - consider adding `optional = true` to its definition.", - feature, fv, f); + consider adding `optional = true` to its definition."); } } } Dep { dep_name } => { if !is_any_dep { - bail!( - "feature `{}` includes `{}`, but `{}` is not listed as a dependency", - feature, - fv, - dep_name - ); + bail!("feature `{feature}` includes `{fv}`, but `{dep_name}` is not listed as a dependency"); } if !is_optional_dep { bail!( - "feature `{}` includes `{}`, but `{}` is not an optional dependency\n\ + "feature `{feature}` includes `{fv}`, but `{dep_name}` is not an optional dependency\n\ A non-optional dependency of the same name is defined; \ - consider adding `optional = true` to its definition.", - feature, - fv, - dep_name + consider adding `optional = true` to its definition." 
); } } @@ -262,25 +277,16 @@ fn build_feature_map( dep_name, dep_feature, weak, - .. } => { // Early check for some unlikely syntax. if dep_feature.contains('/') { - bail!( - "multiple slashes in feature `{}` (included by feature `{}`) are not allowed", - fv, - feature - ); + bail!("multiple slashes in feature `{fv}` (included by feature `{feature}`) are not allowed"); } // dep: cannot be combined with / if let Some(stripped_dep) = dep_name.strip_prefix("dep:") { let has_other_dep = explicitly_listed.contains(stripped_dep); - let is_optional = dep_map - .get(stripped_dep) - .iter() - .flat_map(|d| d.iter()) - .any(|d| d.is_optional()); + let is_optional = dep_map.get(stripped_dep).is_some_and(|&o| o); let extra_help = if *weak || has_other_dep || !is_optional { // In this case, the user should just remove dep:. // Note that "hiding" an optional dependency @@ -304,18 +310,19 @@ fn build_feature_map( // Validation of the feature name will be performed in the resolver. if !is_any_dep { - bail!( - "feature `{}` includes `{}`, but `{}` is not a dependency", - feature, - fv, - dep_name - ); + bail!(MissingDependencyError { + feature: *feature, + feature_value: (*fv).clone(), + dep_name: *dep_name, + weak_optional: *weak, + }) } if *weak && !is_optional_dep { - bail!("feature `{}` includes `{}` with a `?`, but `{}` is not an optional dependency\n\ + bail!( + "feature `{feature}` includes `{fv}` with a `?`, but `{dep_name}` is not an optional dependency\n\ A non-optional dependency of the same name is defined; \ - consider removing the `?` or changing the dependency to be optional", - feature, fv, dep_name); + consider removing the `?` or changing the dependency to be optional" + ); } } } @@ -331,15 +338,13 @@ fn build_feature_map( _ => None, }) .collect(); - if let Some(dep) = dependencies + if let Some((dep, _)) = dep_map .iter() - .find(|dep| dep.is_optional() && !used.contains(&dep.name_in_toml())) + .find(|&(dep, &is_optional)| is_optional && !used.contains(dep)) { 
bail!( - "optional dependency `{}` is not included in any feature\n\ - Make sure that `dep:{}` is included in one of features in the [features] table.", - dep.name_in_toml(), - dep.name_in_toml(), + "optional dependency `{dep}` is not included in any feature\n\ + Make sure that `dep:{dep}` is included in one of features in the [features] table." ); } @@ -366,19 +371,13 @@ pub enum FeatureValue { impl FeatureValue { pub fn new(feature: InternedString) -> FeatureValue { - match feature.find('/') { - Some(pos) => { - let (dep, dep_feat) = feature.split_at(pos); - let dep_feat = &dep_feat[1..]; - let (dep, weak) = if let Some(dep) = dep.strip_suffix('?') { - (dep, true) - } else { - (dep, false) - }; + match feature.split_once('/') { + Some((dep, dep_feat)) => { + let dep_name = dep.strip_suffix('?'); FeatureValue::DepFeature { - dep_name: InternedString::new(dep), + dep_name: InternedString::new(dep_name.unwrap_or(dep)), dep_feature: InternedString::new(dep_feat), - weak, + weak: dep_name.is_some(), } } None => { @@ -393,9 +392,20 @@ impl FeatureValue { } } - /// Returns `true` if this feature explicitly used `dep:` syntax. - pub fn has_dep_prefix(&self) -> bool { - matches!(self, FeatureValue::Dep { .. }) + /// Returns the name of the dependency if and only if it was explicitly named with the `dep:` syntax. + fn explicit_dep_name(&self) -> Option { + match self { + FeatureValue::Dep { dep_name, .. } => Some(*dep_name), + _ => None, + } + } + + fn feature_or_dep_name(&self) -> InternedString { + match self { + FeatureValue::Feature(dep_name) + | FeatureValue::Dep { dep_name, .. } + | FeatureValue::DepFeature { dep_name, .. 
} => *dep_name, + } } } @@ -403,15 +413,15 @@ impl fmt::Display for FeatureValue { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { use self::FeatureValue::*; match self { - Feature(feat) => write!(f, "{}", feat), - Dep { dep_name } => write!(f, "dep:{}", dep_name), + Feature(feat) => write!(f, "{feat}"), + Dep { dep_name } => write!(f, "dep:{dep_name}"), DepFeature { dep_name, dep_feature, weak, } => { let weak = if *weak { "?" } else { "" }; - write!(f, "{}{}/{}", dep_name, weak, dep_feature) + write!(f, "{dep_name}{weak}/{dep_feature}") } } } diff --git a/src/cargo/core/workspace.rs b/src/cargo/core/workspace.rs index 2ef3a1a1a75..ad4ecc3e25f 100644 --- a/src/cargo/core/workspace.rs +++ b/src/cargo/core/workspace.rs @@ -20,14 +20,14 @@ use crate::core::{ }; use crate::core::{EitherManifest, Package, SourceId, VirtualManifest}; use crate::ops; -use crate::sources::{PathSource, CRATES_IO_INDEX, CRATES_IO_REGISTRY}; +use crate::sources::{PathSource, SourceConfigMap, CRATES_IO_INDEX, CRATES_IO_REGISTRY}; use crate::util::edit_distance; use crate::util::errors::{CargoResult, ManifestError}; use crate::util::interning::InternedString; -use crate::util::lints::{check_im_a_teapot, check_implicit_features, unused_dependencies}; +use crate::util::lints::{analyze_cargo_lints_table, check_im_a_teapot}; use crate::util::toml::{read_manifest, InheritableFields}; use crate::util::{ - context::CargoResolverConfig, context::CargoResolverPrecedence, context::ConfigRelativePath, + context::CargoResolverConfig, context::ConfigRelativePath, context::IncompatibleRustVersions, Filesystem, GlobalContext, IntoUrl, }; use cargo_util::paths; @@ -44,69 +44,80 @@ use pathdiff::diff_paths; /// package is loaded and/or learned about. #[derive(Debug)] pub struct Workspace<'gctx> { + /// Cargo configuration information. See [`GlobalContext`]. gctx: &'gctx GlobalContext, - // This path is a path to where the current cargo subcommand was invoked - // from. 
That is the `--manifest-path` argument to Cargo, and - // points to the "main crate" that we're going to worry about. + /// This path is a path to where the current cargo subcommand was invoked + /// from. That is the `--manifest-path` argument to Cargo, and + /// points to the "main crate" that we're going to worry about. current_manifest: PathBuf, - // A list of packages found in this workspace. Always includes at least the - // package mentioned by `current_manifest`. + /// A list of packages found in this workspace. Always includes at least the + /// package mentioned by `current_manifest`. packages: Packages<'gctx>, - // If this workspace includes more than one crate, this points to the root - // of the workspace. This is `None` in the case that `[workspace]` is - // missing, `package.workspace` is missing, and no `Cargo.toml` above - // `current_manifest` was found on the filesystem with `[workspace]`. + /// If this workspace includes more than one crate, this points to the root + /// of the workspace. This is `None` in the case that `[workspace]` is + /// missing, `package.workspace` is missing, and no `Cargo.toml` above + /// `current_manifest` was found on the filesystem with `[workspace]`. root_manifest: Option, - // Shared target directory for all the packages of this workspace. - // `None` if the default path of `root/target` should be used. + /// Shared target directory for all the packages of this workspace. + /// `None` if the default path of `root/target` should be used. target_dir: Option, - // List of members in this workspace with a listing of all their manifest - // paths. The packages themselves can be looked up through the `packages` - // set above. + /// List of members in this workspace with a listing of all their manifest + /// paths. The packages themselves can be looked up through the `packages` + /// set above. 
members: Vec, + /// Set of ids of workspace members member_ids: HashSet, - // The subset of `members` that are used by the - // `build`, `check`, `test`, and `bench` subcommands - // when no package is selected with `--package` / `-p` and `--workspace` - // is not used. - // - // This is set by the `default-members` config - // in the `[workspace]` section. - // When unset, this is the same as `members` for virtual workspaces - // (`--workspace` is implied) - // or only the root package for non-virtual workspaces. + /// The subset of `members` that are used by the + /// `build`, `check`, `test`, and `bench` subcommands + /// when no package is selected with `--package` / `-p` and `--workspace` + /// is not used. + /// + /// This is set by the `default-members` config + /// in the `[workspace]` section. + /// When unset, this is the same as `members` for virtual workspaces + /// (`--workspace` is implied) + /// or only the root package for non-virtual workspaces. default_members: Vec, - // `true` if this is a temporary workspace created for the purposes of the - // `cargo install` or `cargo package` commands. + /// `true` if this is a temporary workspace created for the purposes of the + /// `cargo install` or `cargo package` commands. is_ephemeral: bool, - // `true` if this workspace should enforce optional dependencies even when - // not needed; false if this workspace should only enforce dependencies - // needed by the current configuration (such as in cargo install). In some - // cases `false` also results in the non-enforcement of dev-dependencies. + /// `true` if this workspace should enforce optional dependencies even when + /// not needed; false if this workspace should only enforce dependencies + /// needed by the current configuration (such as in cargo install). In some + /// cases `false` also results in the non-enforcement of dev-dependencies. 
require_optional_deps: bool, - // A cache of loaded packages for particular paths which is disjoint from - // `packages` up above, used in the `load` method down below. + /// A cache of loaded packages for particular paths which is disjoint from + /// `packages` up above, used in the `load` method down below. loaded_packages: RefCell>, - // If `true`, then the resolver will ignore any existing `Cargo.lock` - // file. This is set for `cargo install` without `--locked`. + /// If `true`, then the resolver will ignore any existing `Cargo.lock` + /// file. This is set for `cargo install` without `--locked`. ignore_lock: bool, + /// Requested path of the lockfile (i.e. passed as the cli flag) + requested_lockfile_path: Option, + /// The resolver behavior specified with the `resolver` field. resolve_behavior: ResolveBehavior, + /// If `true`, then workspace `rust_version` would be used in `cargo resolve` + /// and other places that use rust version. + /// This is set based on the resolver version, config settings, and CLI flags. resolve_honors_rust_version: bool, /// Workspace-level custom metadata custom_metadata: Option, + + /// Local overlay configuration. See [`crate::sources::overlay`]. 
+ local_overlays: HashMap, } // Separate structure for tracking loaded packages (to avoid loading anything @@ -232,31 +243,14 @@ impl<'gctx> Workspace<'gctx> { require_optional_deps: true, loaded_packages: RefCell::new(HashMap::new()), ignore_lock: false, + requested_lockfile_path: None, resolve_behavior: ResolveBehavior::V1, resolve_honors_rust_version: false, custom_metadata: None, + local_overlays: HashMap::new(), } } - pub fn new_virtual( - root_path: PathBuf, - current_manifest: PathBuf, - manifest: VirtualManifest, - gctx: &'gctx GlobalContext, - ) -> CargoResult> { - let mut ws = Workspace::new_default(current_manifest, gctx); - ws.root_manifest = Some(root_path.join("Cargo.toml")); - ws.target_dir = gctx.target_dir()?; - ws.packages - .packages - .insert(root_path, MaybePackage::Virtual(manifest)); - ws.find_members()?; - ws.set_resolve_behavior()?; - // TODO: validation does not work because it walks up the directory - // tree looking for the root which is a fake file that doesn't exist. - Ok(ws) - } - /// Creates a "temporary workspace" from one package which only contains /// that package. 
/// @@ -314,11 +308,11 @@ impl<'gctx> Workspace<'gctx> { } match self.gctx().get::("resolver") { Ok(CargoResolverConfig { - something_like_precedence: Some(precedence), + incompatible_rust_versions: Some(incompatible_rust_versions), }) => { if self.gctx().cli_unstable().msrv_policy { self.resolve_honors_rust_version = - precedence == CargoResolverPrecedence::SomethingLikeRustVersion; + incompatible_rust_versions == IncompatibleRustVersions::Fallback; } else { self.gctx() .shell() @@ -326,7 +320,7 @@ impl<'gctx> Workspace<'gctx> { } } Ok(CargoResolverConfig { - something_like_precedence: None, + incompatible_rust_versions: None, }) => {} Err(err) => { if self.gctx().cli_unstable().msrv_policy { @@ -581,7 +575,7 @@ impl<'gctx> Workspace<'gctx> { } /// Returns an iterator over default packages in this workspace - pub fn default_members<'a>(&'a self) -> impl Iterator { + pub fn default_members<'a>(&'a self) -> impl Iterator { let packages = &self.packages; self.default_members .iter() @@ -641,9 +635,38 @@ impl<'gctx> Workspace<'gctx> { self } + /// Returns the directory where the lockfile is in. 
+ pub fn lock_root(&self) -> Filesystem { + if let Some(requested) = self.requested_lockfile_path.as_ref() { + return Filesystem::new( + requested + .parent() + .expect("Lockfile path can't be root") + .to_owned(), + ); + } + self.default_lock_root() + } + + fn default_lock_root(&self) -> Filesystem { + if self.root_maybe().is_embedded() { + self.target_dir() + } else { + Filesystem::new(self.root().to_owned()) + } + } + + pub fn set_requested_lockfile_path(&mut self, path: Option) { + self.requested_lockfile_path = path; + } + + pub fn requested_lockfile_path(&self) -> Option<&Path> { + self.requested_lockfile_path.as_deref() + } + /// Get the lowest-common denominator `package.rust-version` within the workspace, if specified /// anywhere - pub fn rust_version(&self) -> Option<&RustVersion> { + pub fn lowest_rust_version(&self) -> Option<&RustVersion> { self.members().filter_map(|pkg| pkg.rust_version()).min() } @@ -717,6 +740,7 @@ impl<'gctx> Workspace<'gctx> { /// verifies that those are all valid packages to point to. Otherwise, this /// will transitively follow all `path` dependencies looking for members of /// the workspace. + #[tracing::instrument(skip_all)] fn find_members(&mut self) -> CargoResult<()> { let Some(workspace_config) = self.load_workspace_config()? else { debug!("find_members - only me as a member"); @@ -880,6 +904,7 @@ impl<'gctx> Workspace<'gctx> { /// 1. A workspace only has one root. /// 2. All workspace members agree on this one root as the root. /// 3. The current crate is a member of this workspace. + #[tracing::instrument(skip_all)] fn validate(&mut self) -> CargoResult<()> { // The rest of the checks require a VirtualManifest or multiple members. 
if self.root_manifest.is_none() { @@ -948,6 +973,7 @@ impl<'gctx> Workspace<'gctx> { } } + #[tracing::instrument(skip_all)] fn validate_members(&mut self) -> CargoResult<()> { for member in self.members.clone() { let root = self.find_root(&member)?; @@ -1057,7 +1083,7 @@ impl<'gctx> Workspace<'gctx> { ); self.gctx.shell().warn(&msg) }; - if manifest.resolved_toml().has_profiles() { + if manifest.normalized_toml().has_profiles() { emit_warning("profiles")?; } if !manifest.replace().is_empty() { @@ -1140,8 +1166,7 @@ impl<'gctx> Workspace<'gctx> { MaybePackage::Package(ref p) => p.clone(), MaybePackage::Virtual(_) => continue, }; - let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), self.gctx); - src.preload_with(pkg); + let src = PathSource::preload_with(pkg, self.gctx); registry.add_preloaded(Box::new(src)); } } @@ -1180,25 +1205,10 @@ impl<'gctx> Workspace<'gctx> { } pub fn emit_lints(&self, pkg: &Package, path: &Path) -> CargoResult<()> { - let ws_lints = self - .root_maybe() - .workspace_config() - .inheritable() - .and_then(|i| i.lints().ok()) - .unwrap_or_default(); - - let ws_cargo_lints = ws_lints - .get("cargo") - .cloned() - .unwrap_or_default() - .into_iter() - .map(|(k, v)| (k.replace('-', "_"), v)) - .collect(); - let mut error_count = 0; let toml_lints = pkg .manifest() - .resolved_toml() + .normalized_toml() .lints .clone() .map(|lints| lints.lints) @@ -1207,35 +1217,27 @@ impl<'gctx> Workspace<'gctx> { .get("cargo") .cloned() .unwrap_or(manifest::TomlToolLints::default()); - let normalized_lints = cargo_lints - .into_iter() - .map(|(name, lint)| (name.replace('-', "_"), lint)) - .collect(); - check_im_a_teapot( - pkg, - &path, - &normalized_lints, - &ws_cargo_lints, - &mut error_count, - self.gctx, - )?; - check_implicit_features( - pkg, - &path, - &normalized_lints, - &ws_cargo_lints, - &mut error_count, - self.gctx, - )?; - unused_dependencies( + let ws_contents = match self.root_maybe() { + MaybePackage::Package(pkg) => 
pkg.manifest().contents(), + MaybePackage::Virtual(v) => v.contents(), + }; + + let ws_document = match self.root_maybe() { + MaybePackage::Package(pkg) => pkg.manifest().document(), + MaybePackage::Virtual(v) => v.document(), + }; + + analyze_cargo_lints_table( pkg, &path, - &normalized_lints, - &ws_cargo_lints, - &mut error_count, + &cargo_lints, + ws_contents, + ws_document, + self.root_manifest(), self.gctx, )?; + check_im_a_teapot(pkg, &path, &cargo_lints, &mut error_count, self.gctx)?; if error_count > 0 { Err(crate::util::errors::AlreadyPrintedError::new(anyhow!( "encountered {error_count} errors(s) while running lints" @@ -1361,12 +1363,12 @@ impl<'gctx> Workspace<'gctx> { } } - fn report_unknown_features_error( + fn missing_feature_spelling_suggestions( &self, - specs: &[PackageIdSpec], + selected_members: &[&Package], cli_features: &CliFeatures, found_features: &BTreeSet, - ) -> CargoResult<()> { + ) -> Vec { // Keeps track of which features were contained in summary of `member` to suggest similar features in errors let mut summary_features: Vec = Default::default(); @@ -1385,10 +1387,7 @@ impl<'gctx> Workspace<'gctx> { let mut optional_dependency_names_per_member: BTreeMap<&Package, BTreeSet> = Default::default(); - for member in self - .members() - .filter(|m| specs.iter().any(|spec| spec.matches(m.package_id()))) - { + for &member in selected_members { // Only include features this member defines. 
let summary = member.summary(); @@ -1426,7 +1425,7 @@ impl<'gctx> Workspace<'gctx> { edit_distance(a.as_str(), b.as_str(), 3).is_some() }; - let suggestions: Vec<_> = cli_features + cli_features .features .difference(found_features) .map(|feature| match feature { @@ -1520,8 +1519,15 @@ impl<'gctx> Workspace<'gctx> { }) .sorted() .take(5) - .collect(); + .collect() + } + fn report_unknown_features_error( + &self, + specs: &[PackageIdSpec], + cli_features: &CliFeatures, + found_features: &BTreeSet, + ) -> CargoResult<()> { let unknown: Vec<_> = cli_features .features .difference(found_features) @@ -1529,18 +1535,70 @@ impl<'gctx> Workspace<'gctx> { .sorted() .collect(); - if suggestions.is_empty() { - bail!( - "none of the selected packages contains these features: {}", + let (selected_members, unselected_members): (Vec<_>, Vec<_>) = self + .members() + .partition(|member| specs.iter().any(|spec| spec.matches(member.package_id()))); + + let missing_packages_with_the_features = unselected_members + .into_iter() + .filter(|member| { + unknown + .iter() + .any(|feature| member.summary().features().contains_key(&**feature)) + }) + .map(|m| m.name()) + .collect_vec(); + + let these_features = if unknown.len() == 1 { + "this feature" + } else { + "these features" + }; + let mut msg = if let [singular] = &selected_members[..] 
{ + format!( + "the package '{}' does not contain {these_features}: {}", + singular.name(), unknown.join(", ") - ); + ) } else { - bail!( - "none of the selected packages contains these features: {}, did you mean: {}?", - unknown.join(", "), - suggestions.join(", ") + let names = selected_members.iter().map(|m| m.name()).join(", "); + format!("none of the selected packages contains {these_features}: {}\nselected packages: {names}", unknown.join(", ")) + }; + + use std::fmt::Write; + if !missing_packages_with_the_features.is_empty() { + write!( + &mut msg, + "\nhelp: package{} with the missing feature{}: {}", + if missing_packages_with_the_features.len() != 1 { + "s" + } else { + "" + }, + if unknown.len() != 1 { "s" } else { "" }, + missing_packages_with_the_features.join(", ") + )?; + } else { + let suggestions = self.missing_feature_spelling_suggestions( + &selected_members, + cli_features, + found_features, ); + if !suggestions.is_empty() { + write!( + &mut msg, + "\nhelp: there {}: {}", + if suggestions.len() == 1 { + "is a similarly named feature" + } else { + "are similarly named features" + }, + suggestions.join(", ") + )?; + } } + + bail!("{msg}") } /// New command-line feature selection behavior with resolver = "2" or the @@ -1694,6 +1752,44 @@ impl<'gctx> Workspace<'gctx> { // Cargo to panic, see issue #10545. self.is_member(&unit.pkg) && !(unit.target.for_host() || unit.pkg.proc_macro()) } + + /// Adds a local package registry overlaying a `SourceId`. + /// + /// See [`crate::sources::overlay::DependencyConfusionThreatOverlaySource`] for why you shouldn't use this. + pub fn add_local_overlay(&mut self, id: SourceId, registry_path: PathBuf) { + self.local_overlays.insert(id, registry_path); + } + + /// Builds a package registry that reflects this workspace configuration. 
+ pub fn package_registry(&self) -> CargoResult> { + let source_config = + SourceConfigMap::new_with_overlays(self.gctx(), self.local_overlays()?)?; + PackageRegistry::new_with_source_config(self.gctx(), source_config) + } + + /// Returns all the configured local overlays, including the ones from our secret environment variable. + fn local_overlays(&self) -> CargoResult> { + let mut ret = self + .local_overlays + .iter() + .map(|(id, path)| Ok((*id, SourceId::for_local_registry(path)?))) + .collect::>>()?; + + if let Ok(overlay) = self + .gctx + .get_env("__CARGO_TEST_DEPENDENCY_CONFUSION_VULNERABILITY_DO_NOT_USE_THIS") + { + let (url, path) = overlay.split_once('=').ok_or(anyhow::anyhow!( + "invalid overlay format. I won't tell you why; you shouldn't be using it anyway" + ))?; + ret.push(( + SourceId::from_url(url)?, + SourceId::for_local_registry(path.as_ref())?, + )); + } + + Ok(ret.into_iter()) + } } impl<'gctx> Packages<'gctx> { @@ -1790,6 +1886,7 @@ impl WorkspaceRootConfig { self.members.is_some() } + #[tracing::instrument(skip_all)] fn members_paths(&self, globs: &[String]) -> CargoResult> { let mut expanded_list = Vec::new(); diff --git a/src/cargo/lib.rs b/src/cargo/lib.rs index 16ed4cfaa3a..7dc25ad14a0 100644 --- a/src/cargo/lib.rs +++ b/src/cargo/lib.rs @@ -7,9 +7,10 @@ //! - : targeted at cargo contributors //! - Updated on each update of the `cargo` submodule in `rust-lang/rust` //! -//! **WARNING:** Using Cargo as a library has drawbacks, particularly the API is unstable, -//! and there is no clear path to stabilize it soon at the time of writing. See [The Cargo Book: -//! External tools] for more on this topic. +//! > This library is maintained by the Cargo team, primarily for use by Cargo +//! > and not intended for external use (except as a transitive dependency). This +//! > crate may make major changes to its APIs. See [The Cargo Book: +//! > External tools] for more on this topic. //! //! ## Overview //! 
diff --git a/src/cargo/ops/cargo_add/mod.rs b/src/cargo/ops/cargo_add/mod.rs index 1781d4c9359..ac49f58e1d3 100644 --- a/src/cargo/ops/cargo_add/mod.rs +++ b/src/cargo/ops/cargo_add/mod.rs @@ -12,6 +12,7 @@ use std::str::FromStr; use anyhow::Context as _; use cargo_util::paths; use cargo_util_schemas::core::PartialVersion; +use cargo_util_schemas::manifest::PathBaseName; use cargo_util_schemas::manifest::RustVersion; use indexmap::IndexSet; use itertools::Itertools; @@ -20,6 +21,7 @@ use toml_edit::Item as TomlItem; use crate::core::dependency::DepKind; use crate::core::registry::PackageRegistry; use crate::core::FeatureValue; +use crate::core::Features; use crate::core::Package; use crate::core::Registry; use crate::core::Shell; @@ -28,6 +30,7 @@ use crate::core::Workspace; use crate::sources::source::QueryKind; use crate::util::cache_lock::CacheLockMode; use crate::util::style; +use crate::util::toml::lookup_path_base; use crate::util::toml_mut::dependency::Dependency; use crate::util::toml_mut::dependency::GitSource; use crate::util::toml_mut::dependency::MaybeWorkspace; @@ -78,7 +81,7 @@ pub fn add(workspace: &Workspace<'_>, options: &AddOptions<'_>) -> CargoResult<( ); } - let mut registry = PackageRegistry::new(options.gctx)?; + let mut registry = workspace.package_registry()?; let deps = { let _lock = options @@ -197,7 +200,13 @@ pub fn add(workspace: &Workspace<'_>, options: &AddOptions<'_>) -> CargoResult<( print_dep_table_msg(&mut options.gctx.shell(), &dep)?; - manifest.insert_into_table(&dep_table, &dep)?; + manifest.insert_into_table( + &dep_table, + &dep, + workspace.gctx(), + workspace.root(), + options.spec.manifest().unstable_features(), + )?; if dep.optional == Some(true) { let is_namespaced_features_supported = check_rust_version_for_optional_dependency(options.spec.rust_version())?; @@ -270,8 +279,11 @@ pub struct DepOp { /// Registry for looking up dependency version pub registry: Option, - /// Git repo for dependency + /// File system path for 
dependency pub path: Option, + /// Specify a named base for a path dependency + pub base: Option, + /// Git repo for dependency pub git: Option, /// Specify an alternative git branch @@ -332,7 +344,19 @@ fn resolve_dependency( selected } else if let Some(raw_path) = &arg.path { let path = paths::normalize_path(&std::env::current_dir()?.join(raw_path)); - let src = PathSource::new(&path); + let mut src = PathSource::new(path); + src.base = arg.base.clone(); + + if let Some(base) = &arg.base { + // Validate that the base is valid. + let workspace_root = || Ok(ws.root_manifest().parent().unwrap()); + lookup_path_base( + &PathBaseName::new(base.clone())?, + &gctx, + &workspace_root, + spec.manifest().unstable_features(), + )?; + } let selected = if let Some(crate_spec) = &crate_spec { if let Some(v) = crate_spec.version_req() { @@ -349,12 +373,13 @@ fn resolve_dependency( } selected } else { - let source = crate::sources::PathSource::new(&path, src.source_id()?, gctx); - let package = source - .read_packages()? 
- .pop() - .expect("read_packages errors when no packages"); - Dependency::from(package.summary()) + let mut source = crate::sources::PathSource::new(&src.path, src.source_id()?, gctx); + let package = source.root_package()?; + let mut selected = Dependency::from(package.summary()); + if let Some(Source::Path(selected_src)) = &mut selected.source { + selected_src.base = src.base; + } + selected }; selected } else if let Some(crate_spec) = &crate_spec { @@ -364,7 +389,16 @@ fn resolve_dependency( }; selected_dep = populate_dependency(selected_dep, arg); - let old_dep = get_existing_dependency(manifest, selected_dep.toml_key(), section)?; + let lookup = |dep_key: &_| { + get_existing_dependency( + ws, + spec.manifest().unstable_features(), + manifest, + dep_key, + section, + ) + }; + let old_dep = fuzzy_lookup(&mut selected_dep, lookup, gctx)?; let mut dependency = if let Some(mut old_dep) = old_dep.clone() { if old_dep.name != selected_dep.name { // Assuming most existing keys are not relevant when the package changes @@ -386,7 +420,10 @@ fn resolve_dependency( if dependency.source().is_none() { // Checking for a workspace dependency happens first since a member could be specified // in the workspace dependencies table as a dependency - if let Some(_dep) = find_workspace_dep(dependency.toml_key(), ws.root_manifest()).ok() { + let lookup = |toml_key: &_| { + Ok(find_workspace_dep(toml_key, ws, ws.root_manifest(), ws.unstable_features()).ok()) + }; + if let Some(_dep) = fuzzy_lookup(&mut dependency, lookup, gctx)? 
{ dependency = dependency.set_source(WorkspaceSource::new()); } else if let Some(package) = ws.members().find(|p| p.name().as_str() == dependency.name) { // Only special-case workspaces when the user doesn't provide any extra @@ -432,7 +469,12 @@ fn resolve_dependency( let query = dependency.query(gctx)?; let query = match query { MaybeWorkspace::Workspace(_workspace) => { - let dep = find_workspace_dep(dependency.toml_key(), ws.root_manifest())?; + let dep = find_workspace_dep( + dependency.toml_key(), + ws, + ws.root_manifest(), + ws.unstable_features(), + )?; if let Some(features) = dep.features.clone() { dependency = dependency.set_inherited_features(features); } @@ -452,6 +494,42 @@ fn resolve_dependency( Ok(dependency) } +fn fuzzy_lookup( + dependency: &mut Dependency, + lookup: impl Fn(&str) -> CargoResult>, + gctx: &GlobalContext, +) -> CargoResult> { + if let Some(rename) = dependency.rename() { + // Manually implement `toml_key` to restrict fuzzy lookups to only package names to mirror `PackageRegistry::query()` + return lookup(rename); + } + + for name_permutation in [ + dependency.name.clone(), + dependency.name.replace('-', "_"), + dependency.name.replace('_', "-"), + ] { + let Some(dep) = lookup(&name_permutation)? else { + continue; + }; + + if dependency.name != name_permutation { + // Mirror the fuzzy matching policy of `PackageRegistry::query()` + if !matches!(dep.source, Some(Source::Registry(_))) { + continue; + } + gctx.shell().warn(format!( + "translating `{}` to `{}`", + dependency.name, &name_permutation, + ))?; + dependency.name = name_permutation; + } + return Ok(Some(dep)); + } + + Ok(None) +} + /// When { workspace = true } you cannot define other keys that configure /// the source of the dependency such as `version`, `registry`, `registry-index`, /// `path`, `git`, `branch`, `tag`, `rev`, or `package`. 
You can also not define @@ -511,6 +589,8 @@ fn check_rust_version_for_optional_dependency( /// If it doesn't exist but exists in another table, let's use that as most likely users /// want to use the same version across all tables unless they are renaming. fn get_existing_dependency( + ws: &Workspace<'_>, + unstable_features: &Features, manifest: &LocalManifest, dep_key: &str, section: &DepTable, @@ -525,7 +605,7 @@ fn get_existing_dependency( } let mut possible: Vec<_> = manifest - .get_dependency_versions(dep_key) + .get_dependency_versions(dep_key, ws, unstable_features) .map(|(path, dep)| { let key = if path == *section { (Key::Existing, true) @@ -740,6 +820,11 @@ fn select_package( if let Some(reg_name) = dependency.registry.as_deref() { dep = dep.set_registry(reg_name); } + if let Some(Source::Path(PathSource { base, .. })) = dependency.source() { + if let Some(Source::Path(dep_src)) = &mut dep.source { + dep_src.base = base.clone(); + } + } Ok(dep) } _ => { @@ -1071,7 +1156,12 @@ fn format_features_version_suffix(dep: &DependencyUI) -> String { } } -fn find_workspace_dep(toml_key: &str, root_manifest: &Path) -> CargoResult { +fn find_workspace_dep( + toml_key: &str, + ws: &Workspace<'_>, + root_manifest: &Path, + unstable_features: &Features, +) -> CargoResult { let manifest = LocalManifest::try_new(root_manifest)?; let manifest = manifest .data @@ -1088,11 +1178,17 @@ fn find_workspace_dep(toml_key: &str, root_manifest: &Path) -> CargoResult { pub gctx: &'gctx GlobalContext, @@ -75,7 +77,14 @@ pub fn clean(ws: &Workspace<'_>, opts: &CleanOptions<'_>) -> CargoResult<()> { if opts.spec.is_empty() { clean_ctx.remove_paths(&[target_dir.into_path_unlocked()])?; } else { - clean_specs(&mut clean_ctx, &ws, &profiles, &opts.targets, &opts.spec)?; + clean_specs( + &mut clean_ctx, + &ws, + &profiles, + &opts.targets, + &opts.spec, + opts.dry_run, + )?; } } @@ -89,11 +98,12 @@ fn clean_specs( profiles: &Profiles, targets: &[String], spec: &[String], + dry_run: bool, ) 
-> CargoResult<()> { // Clean specific packages. let requested_kinds = CompileKind::from_requested_targets(clean_ctx.gctx, targets)?; let target_data = RustcTargetData::new(ws, &requested_kinds)?; - let (pkg_set, resolve) = ops::resolve_ws(ws)?; + let (pkg_set, resolve) = ops::resolve_ws(ws, dry_run)?; let prof_dir_name = profiles.get_dir_name(); let host_layout = Layout::new(ws, None, &prof_dir_name)?; // Convert requested kinds to a Vec of layouts. @@ -169,6 +179,9 @@ fn clean_specs( clean_ctx.progress = Box::new(CleaningPackagesBar::new(clean_ctx.gctx, packages.len())); + // Try to reduce the amount of times we iterate over the same target directory by storing away + // the directories we've iterated over (and cleaned for a given package). + let mut cleaned_packages: HashMap<_, HashSet<_>> = HashMap::default(); for pkg in packages { let pkg_dir = format!("{}-*", pkg.name()); clean_ctx.progress.on_cleaning_package(&pkg.name())?; @@ -192,7 +205,9 @@ fn clean_specs( } continue; } - let crate_name = target.crate_name(); + let crate_name: Rc = target.crate_name().into(); + let path_dot: &str = &format!("{crate_name}."); + let path_dash: &str = &format!("{crate_name}-"); for &mode in &[ CompileMode::Build, CompileMode::Test, @@ -212,28 +227,15 @@ fn clean_specs( TargetKind::Test | TargetKind::Bench => (layout.deps(), None), _ => (layout.deps(), Some(layout.dest())), }; + let mut dir_glob_str = escape_glob_path(dir)?; + let dir_glob = Path::new(&dir_glob_str); for file_type in file_types { // Some files include a hash in the filename, some don't. let hashed_name = file_type.output_filename(target, Some("*")); let unhashed_name = file_type.output_filename(target, None); - let dir_glob = escape_glob_path(dir)?; - let dir_glob = Path::new(&dir_glob); clean_ctx.rm_rf_glob(&dir_glob.join(&hashed_name))?; clean_ctx.rm_rf(&dir.join(&unhashed_name))?; - // Remove dep-info file generated by rustc. It is not tracked in - // file_types. It does not have a prefix. 
- let hashed_dep_info = dir_glob.join(format!("{}-*.d", crate_name)); - clean_ctx.rm_rf_glob(&hashed_dep_info)?; - let unhashed_dep_info = dir.join(format!("{}.d", crate_name)); - clean_ctx.rm_rf(&unhashed_dep_info)?; - // Remove split-debuginfo files generated by rustc. - let split_debuginfo_obj = dir_glob.join(format!("{}.*.o", crate_name)); - clean_ctx.rm_rf_glob(&split_debuginfo_obj)?; - let split_debuginfo_dwo = dir_glob.join(format!("{}.*.dwo", crate_name)); - clean_ctx.rm_rf_glob(&split_debuginfo_dwo)?; - let split_debuginfo_dwp = dir_glob.join(format!("{}.*.dwp", crate_name)); - clean_ctx.rm_rf_glob(&split_debuginfo_dwp)?; // Remove the uplifted copy. if let Some(uplift_dir) = uplift_dir { @@ -244,6 +246,31 @@ fn clean_specs( clean_ctx.rm_rf(&dep_info)?; } } + let unhashed_dep_info = dir.join(format!("{}.d", crate_name)); + clean_ctx.rm_rf(&unhashed_dep_info)?; + + if !dir_glob_str.ends_with(std::path::MAIN_SEPARATOR) { + dir_glob_str.push(std::path::MAIN_SEPARATOR); + } + dir_glob_str.push('*'); + let dir_glob_str: Rc = dir_glob_str.into(); + if cleaned_packages + .entry(dir_glob_str.clone()) + .or_default() + .insert(crate_name.clone()) + { + let paths = [ + // Remove dep-info file generated by rustc. It is not tracked in + // file_types. It does not have a prefix. + (path_dash, ".d"), + // Remove split-debuginfo files generated by rustc. + (path_dot, ".o"), + (path_dot, ".dwo"), + (path_dot, ".dwp"), + ]; + clean_ctx.rm_rf_prefix_list(&dir_glob_str, &paths)?; + } + // TODO: what to do about build_script_build? let dir = escape_glob_path(layout.incremental())?; let incremental = Path::new(&dir).join(format!("{}-*", crate_name)); @@ -322,6 +349,30 @@ impl<'gctx> CleanContext<'gctx> { Ok(()) } + /// Removes files matching a glob and any of the provided filename patterns (prefix/suffix pairs). + /// + /// This function iterates over files matching a glob (`pattern`) and removes those whose + /// filenames start and end with specific prefix/suffix pairs. 
It should be more efficient for + /// operations involving multiple prefix/suffix pairs, as it iterates over the directory + /// only once, unlike making multiple calls to [`Self::rm_rf_glob`]. + fn rm_rf_prefix_list( + &mut self, + pattern: &str, + path_matchers: &[(&str, &str)], + ) -> CargoResult<()> { + for path in glob::glob(pattern)? { + let path = path?; + let filename = path.file_name().and_then(|name| name.to_str()).unwrap(); + if path_matchers + .iter() + .any(|(prefix, suffix)| filename.starts_with(prefix) && filename.ends_with(suffix)) + { + self.rm_rf(&path)?; + } + } + Ok(()) + } + pub fn rm_rf(&mut self, path: &Path) -> CargoResult<()> { let meta = match fs::symlink_metadata(path) { Ok(meta) => meta, diff --git a/src/cargo/ops/cargo_compile/mod.rs b/src/cargo/ops/cargo_compile/mod.rs index 68f1df76ba6..81c197ac719 100644 --- a/src/cargo/ops/cargo_compile/mod.rs +++ b/src/cargo/ops/cargo_compile/mod.rs @@ -35,7 +35,6 @@ //! [`drain_the_queue`]: crate::core::compiler::job_queue //! ["Cargo Target"]: https://doc.rust-lang.org/nightly/cargo/reference/cargo-targets.html -use cargo_platform::Cfg; use std::collections::{HashMap, HashSet}; use std::hash::{Hash, Hasher}; use std::sync::Arc; @@ -157,7 +156,11 @@ pub fn compile_ws<'a>( } crate::core::gc::auto_gc(bcx.gctx); let build_runner = BuildRunner::new(&bcx)?; - build_runner.compile(exec) + if options.build_config.dry_run { + build_runner.dry_run() + } else { + build_runner.compile(exec) + } } /// Executes `rustc --print `. 
@@ -264,6 +267,7 @@ pub fn create_bcx<'a, 'gctx>( HasDevUnits::No } }; + let dry_run = false; let resolve = ops::resolve_ws_with_opts( ws, &mut target_data, @@ -272,6 +276,7 @@ pub fn create_bcx<'a, 'gctx>( &specs, has_dev_units, crate::core::resolver::features::ForceAllTargets::No, + dry_run, )?; let WorkspaceResolve { mut pkg_set, @@ -359,6 +364,7 @@ pub fn create_bcx<'a, 'gctx>( let generator = UnitGenerator { ws, packages: &to_builds, + target_data: &target_data, filter, requested_kinds: &build_config.requested_kinds, explicit_host_kind, @@ -398,6 +404,7 @@ pub fn create_bcx<'a, 'gctx>( &pkg_set, interner, &profiles, + &target_data, )? } else { Default::default() @@ -437,7 +444,6 @@ pub fn create_bcx<'a, 'gctx>( &units, &scrape_units, host_kind_requested.then_some(explicit_host_kind), - &target_data, ); let mut extra_compiler_args = HashMap::new(); @@ -496,7 +502,10 @@ pub fn create_bcx<'a, 'gctx>( let plural = if incompatible.len() == 1 { "" } else { "s" }; let mut message = format!( - "rustc {rustc_version} is not supported by the following package{plural}:\n" + "rustc {rustc_version} is not supported by the following package{plural}:\n + Note that this is the rustc version that ships with Solana tools and \ + not your system's rustc version. 
Use `solana-install update` or head \ + over to https://docs.solanalabs.com/cli/install to install a newer version.\n", ); incompatible.sort_by_key(|(unit, _)| (unit.pkg.name(), unit.pkg.version())); for (unit, msrv) in incompatible { @@ -577,7 +586,6 @@ fn rebuild_unit_graph_shared( roots: &[Unit], scrape_units: &[Unit], to_host: Option, - target_data: &RustcTargetData<'_>, ) -> (Vec, Vec, UnitGraph) { let mut result = UnitGraph::new(); // Map of the old unit to the new unit, used to avoid recursing into units @@ -594,7 +602,6 @@ fn rebuild_unit_graph_shared( root, false, to_host, - target_data, ) }) .collect(); @@ -621,7 +628,6 @@ fn traverse_and_share( unit: &Unit, unit_is_for_host: bool, to_host: Option, - target_data: &RustcTargetData<'_>, ) -> Unit { if let Some(new_unit) = memo.get(unit) { // Already computed, no need to recompute. @@ -639,7 +645,6 @@ fn traverse_and_share( &dep.unit, dep.unit_for.is_for_host(), to_host, - target_data, ); new_dep_unit.hash(&mut dep_hash); UnitDep { @@ -663,13 +668,8 @@ fn traverse_and_share( _ => unit.kind, }; - let cfg = target_data.cfg(unit.kind); - let is_target_windows_msvc = cfg.contains(&Cfg::Name("windows".to_string())) - && cfg.contains(&Cfg::KeyPair("target_env".to_string(), "msvc".to_string())); let mut profile = unit.profile.clone(); - // For MSVC, rustc currently treats -Cstrip=debuginfo same as -Cstrip=symbols, which causes - // this optimization to also remove symbols and thus break backtraces. - if profile.strip.is_deferred() && !is_target_windows_msvc { + if profile.strip.is_deferred() { // If strip was not manually set, and all dependencies of this unit together // with this unit have debuginfo turned off, we enable debuginfo stripping. // This will remove pre-existing debug symbols coming from the standard library. 
@@ -703,6 +703,9 @@ fn traverse_and_share( to_host.unwrap(), unit.mode, unit.features.clone(), + unit.rustflags.clone(), + unit.rustdocflags.clone(), + unit.links_overrides.clone(), unit.is_std, unit.dep_hash, unit.artifact, @@ -728,6 +731,9 @@ fn traverse_and_share( canonical_kind, unit.mode, unit.features.clone(), + unit.rustflags.clone(), + unit.rustdocflags.clone(), + unit.links_overrides.clone(), unit.is_std, new_dep_hash, unit.artifact, @@ -889,6 +895,9 @@ fn override_rustc_crate_types( unit.kind, unit.mode, unit.features.clone(), + unit.rustflags.clone(), + unit.rustdocflags.clone(), + unit.links_overrides.clone(), unit.is_std, unit.dep_hash, unit.artifact, diff --git a/src/cargo/ops/cargo_compile/unit_generator.rs b/src/cargo/ops/cargo_compile/unit_generator.rs index b2d86b7531c..ce10e173c6c 100644 --- a/src/cargo/ops/cargo_compile/unit_generator.rs +++ b/src/cargo/ops/cargo_compile/unit_generator.rs @@ -4,8 +4,8 @@ use std::fmt::Write; use crate::core::compiler::rustdoc::RustdocScrapeExamples; use crate::core::compiler::unit_dependencies::IsArtifact; -use crate::core::compiler::UnitInterner; use crate::core::compiler::{CompileKind, CompileMode, Unit}; +use crate::core::compiler::{RustcTargetData, UnitInterner}; use crate::core::dependency::DepKind; use crate::core::profiles::{Profiles, UnitFor}; use crate::core::resolver::features::{self, FeaturesFor}; @@ -47,6 +47,7 @@ struct Proposal<'a> { pub(super) struct UnitGenerator<'a, 'gctx> { pub ws: &'a Workspace<'gctx>, pub packages: &'a [&'a Package], + pub target_data: &'a RustcTargetData<'gctx>, pub filter: &'a CompileFilter, pub requested_kinds: &'a [CompileKind], pub explicit_host_kind: CompileKind, @@ -162,13 +163,17 @@ impl<'a> UnitGenerator<'a, '_> { unit_for, kind, ); + let kind = kind.for_target(target); self.interner.intern( pkg, target, profile, - kind.for_target(target), + kind, target_mode, features.clone(), + self.target_data.info(kind).rustflags.clone(), + 
self.target_data.info(kind).rustdocflags.clone(), + self.target_data.target_config(kind).links_overrides.clone(), /*is_std*/ false, /*dep_hash*/ 0, IsArtifact::No, diff --git a/src/cargo/ops/cargo_fetch.rs b/src/cargo/ops/cargo_fetch.rs index 761f171f1f6..37b56438cd6 100644 --- a/src/cargo/ops/cargo_fetch.rs +++ b/src/cargo/ops/cargo_fetch.rs @@ -19,7 +19,8 @@ pub fn fetch<'a>( options: &FetchOptions<'a>, ) -> CargoResult<(Resolve, PackageSet<'a>)> { ws.emit_warnings()?; - let (mut packages, resolve) = ops::resolve_ws(ws)?; + let dry_run = false; + let (mut packages, resolve) = ops::resolve_ws(ws, dry_run)?; let jobs = Some(JobsConfig::Integer(1)); let keep_going = false; diff --git a/src/cargo/ops/cargo_generate_lockfile.rs b/src/cargo/ops/cargo_generate_lockfile.rs deleted file mode 100644 index fc22608e8f2..00000000000 --- a/src/cargo/ops/cargo_generate_lockfile.rs +++ /dev/null @@ -1,691 +0,0 @@ -use crate::core::registry::PackageRegistry; -use crate::core::resolver::features::{CliFeatures, HasDevUnits}; -use crate::core::shell::Verbosity; -use crate::core::Registry as _; -use crate::core::{PackageId, PackageIdSpec, PackageIdSpecQuery}; -use crate::core::{Resolve, SourceId, Workspace}; -use crate::ops; -use crate::sources::source::QueryKind; -use crate::util::cache_lock::CacheLockMode; -use crate::util::context::GlobalContext; -use crate::util::style; -use crate::util::CargoResult; -use std::cmp::Ordering; -use std::collections::{BTreeMap, HashSet}; -use tracing::debug; - -pub struct UpdateOptions<'a> { - pub gctx: &'a GlobalContext, - pub to_update: Vec, - pub precise: Option<&'a str>, - pub recursive: bool, - pub dry_run: bool, - pub workspace: bool, -} - -pub fn generate_lockfile(ws: &Workspace<'_>) -> CargoResult<()> { - let mut registry = PackageRegistry::new(ws.gctx())?; - let previous_resolve = None; - let mut resolve = ops::resolve_with_previous( - &mut registry, - ws, - &CliFeatures::new_all(true), - HasDevUnits::Yes, - previous_resolve, - None, - &[], 
- true, - )?; - ops::write_pkg_lockfile(ws, &mut resolve)?; - print_lockfile_changes(ws, previous_resolve, &resolve, &mut registry)?; - Ok(()) -} - -pub fn update_lockfile(ws: &Workspace<'_>, opts: &UpdateOptions<'_>) -> CargoResult<()> { - if opts.recursive && opts.precise.is_some() { - anyhow::bail!("cannot specify both recursive and precise simultaneously") - } - - if ws.members().count() == 0 { - anyhow::bail!("you can't generate a lockfile for an empty workspace.") - } - - // Updates often require a lot of modifications to the registry, so ensure - // that we're synchronized against other Cargos. - let _lock = ws - .gctx() - .acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?; - - let previous_resolve = match ops::load_pkg_lockfile(ws)? { - Some(resolve) => resolve, - None => { - match opts.precise { - None => return generate_lockfile(ws), - - // Precise option specified, so calculate a previous_resolve required - // by precise package update later. - Some(_) => { - let mut registry = PackageRegistry::new(opts.gctx)?; - ops::resolve_with_previous( - &mut registry, - ws, - &CliFeatures::new_all(true), - HasDevUnits::Yes, - None, - None, - &[], - true, - )? - } - } - } - }; - let mut registry = PackageRegistry::new(opts.gctx)?; - let mut to_avoid = HashSet::new(); - - if opts.to_update.is_empty() { - if !opts.workspace { - to_avoid.extend(previous_resolve.iter()); - to_avoid.extend(previous_resolve.unused_patches()); - } - } else { - let mut sources = Vec::new(); - for name in opts.to_update.iter() { - let pid = previous_resolve.query(name)?; - if opts.recursive { - fill_with_deps(&previous_resolve, pid, &mut to_avoid, &mut HashSet::new()); - } else { - to_avoid.insert(pid); - sources.push(match opts.precise { - Some(precise) => { - // TODO: see comment in `resolve.rs` as well, but this - // seems like a pretty hokey reason to single out - // the registry as well. 
- if pid.source_id().is_registry() { - pid.source_id().with_precise_registry_version( - pid.name(), - pid.version().clone(), - precise, - )? - } else { - pid.source_id().with_git_precise(Some(precise.to_string())) - } - } - None => pid.source_id().without_precise(), - }); - } - if let Ok(unused_id) = - PackageIdSpec::query_str(name, previous_resolve.unused_patches().iter().cloned()) - { - to_avoid.insert(unused_id); - } - } - - // Mirror `--workspace` and never avoid workspace members. - // Filtering them out here so the above processes them normally - // so their dependencies can be updated as requested - to_avoid = to_avoid - .into_iter() - .filter(|id| { - for package in ws.members() { - let member_id = package.package_id(); - // Skip checking the `version` because `previous_resolve` might have a stale - // value. - // When dealing with workspace members, the other fields should be a - // sufficiently unique match. - if id.name() == member_id.name() && id.source_id() == member_id.source_id() { - return false; - } - } - true - }) - .collect(); - - registry.add_sources(sources)?; - } - - // Here we place an artificial limitation that all non-registry sources - // cannot be locked at more than one revision. This means that if a Git - // repository provides more than one package, they must all be updated in - // step when any of them are updated. - // - // TODO: this seems like a hokey reason to single out the registry as being - // different. 
- let to_avoid_sources: HashSet<_> = to_avoid - .iter() - .map(|p| p.source_id()) - .filter(|s| !s.is_registry()) - .collect(); - - let keep = |p: &PackageId| !to_avoid_sources.contains(&p.source_id()) && !to_avoid.contains(p); - - let mut resolve = ops::resolve_with_previous( - &mut registry, - ws, - &CliFeatures::new_all(true), - HasDevUnits::Yes, - Some(&previous_resolve), - Some(&keep), - &[], - true, - )?; - - print_lockfile_updates( - ws, - &previous_resolve, - &resolve, - opts.precise.is_some(), - &mut registry, - )?; - if opts.dry_run { - opts.gctx - .shell() - .warn("not updating lockfile due to dry run")?; - } else { - ops::write_pkg_lockfile(ws, &mut resolve)?; - } - Ok(()) -} - -/// Prints lockfile change statuses. -/// -/// This would acquire the package-cache lock, as it may update the index to -/// show users latest available versions. -pub fn print_lockfile_changes( - ws: &Workspace<'_>, - previous_resolve: Option<&Resolve>, - resolve: &Resolve, - registry: &mut PackageRegistry<'_>, -) -> CargoResult<()> { - let _lock = ws - .gctx() - .acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?; - if let Some(previous_resolve) = previous_resolve { - print_lockfile_sync(ws, previous_resolve, resolve, registry) - } else { - print_lockfile_generation(ws, resolve, registry) - } -} - -fn print_lockfile_generation( - ws: &Workspace<'_>, - resolve: &Resolve, - registry: &mut PackageRegistry<'_>, -) -> CargoResult<()> { - let diff = PackageDiff::new(&resolve); - let num_pkgs: usize = diff.iter().map(|d| d.added.len()).sum(); - if num_pkgs <= 1 { - // just ourself, nothing worth reporting - return Ok(()); - } - status_locking(ws, num_pkgs)?; - - for diff in diff { - fn format_latest(version: semver::Version) -> String { - let warn = style::WARN; - format!(" {warn}(latest: v{version}){warn:#}") - } - let possibilities = if let Some(query) = diff.alternatives_query() { - loop { - match registry.query_vec(&query, QueryKind::Exact) { - 
std::task::Poll::Ready(res) => { - break res?; - } - std::task::Poll::Pending => registry.block_until_ready()?, - } - } - } else { - vec![] - }; - - for package in diff.added.iter() { - let latest = if !possibilities.is_empty() { - possibilities - .iter() - .map(|s| s.as_summary()) - .filter(|s| is_latest(s.version(), package.version())) - .map(|s| s.version().clone()) - .max() - .map(format_latest) - } else { - None - }; - - if let Some(latest) = latest { - ws.gctx().shell().status_with_color( - "Adding", - format!("{package}{latest}"), - &style::NOTE, - )?; - } - } - } - - Ok(()) -} - -fn print_lockfile_sync( - ws: &Workspace<'_>, - previous_resolve: &Resolve, - resolve: &Resolve, - registry: &mut PackageRegistry<'_>, -) -> CargoResult<()> { - let diff = PackageDiff::diff(&previous_resolve, &resolve); - let num_pkgs: usize = diff.iter().map(|d| d.added.len()).sum(); - if num_pkgs == 0 { - return Ok(()); - } - status_locking(ws, num_pkgs)?; - - for diff in diff { - fn format_latest(version: semver::Version) -> String { - let warn = style::WARN; - format!(" {warn}(latest: v{version}){warn:#}") - } - let possibilities = if let Some(query) = diff.alternatives_query() { - loop { - match registry.query_vec(&query, QueryKind::Exact) { - std::task::Poll::Ready(res) => { - break res?; - } - std::task::Poll::Pending => registry.block_until_ready()?, - } - } - } else { - vec![] - }; - - if let Some((removed, added)) = diff.change() { - let latest = if !possibilities.is_empty() { - possibilities - .iter() - .map(|s| s.as_summary()) - .filter(|s| is_latest(s.version(), added.version())) - .map(|s| s.version().clone()) - .max() - .map(format_latest) - } else { - None - } - .unwrap_or_default(); - - let msg = if removed.source_id().is_git() { - format!( - "{removed} -> #{}", - &added.source_id().precise_git_fragment().unwrap()[..8], - ) - } else { - format!("{removed} -> v{}{latest}", added.version()) - }; - - // If versions differ only in build metadata, we call it an "update" 
- // regardless of whether the build metadata has gone up or down. - // This metadata is often stuff like git commit hashes, which are - // not meaningfully ordered. - if removed.version().cmp_precedence(added.version()) == Ordering::Greater { - ws.gctx() - .shell() - .status_with_color("Downgrading", msg, &style::WARN)?; - } else { - ws.gctx() - .shell() - .status_with_color("Updating", msg, &style::GOOD)?; - } - } else { - for package in diff.added.iter() { - let latest = if !possibilities.is_empty() { - possibilities - .iter() - .map(|s| s.as_summary()) - .filter(|s| is_latest(s.version(), package.version())) - .map(|s| s.version().clone()) - .max() - .map(format_latest) - } else { - None - } - .unwrap_or_default(); - - ws.gctx().shell().status_with_color( - "Adding", - format!("{package}{latest}"), - &style::NOTE, - )?; - } - } - } - - Ok(()) -} - -fn print_lockfile_updates( - ws: &Workspace<'_>, - previous_resolve: &Resolve, - resolve: &Resolve, - precise: bool, - registry: &mut PackageRegistry<'_>, -) -> CargoResult<()> { - let diff = PackageDiff::diff(&previous_resolve, &resolve); - let num_pkgs: usize = diff.iter().map(|d| d.added.len()).sum(); - if !precise { - status_locking(ws, num_pkgs)?; - } - - let mut unchanged_behind = 0; - for diff in diff { - fn format_latest(version: semver::Version) -> String { - let warn = style::WARN; - format!(" {warn}(latest: v{version}){warn:#}") - } - let possibilities = if let Some(query) = diff.alternatives_query() { - loop { - match registry.query_vec(&query, QueryKind::Exact) { - std::task::Poll::Ready(res) => { - break res?; - } - std::task::Poll::Pending => registry.block_until_ready()?, - } - } - } else { - vec![] - }; - - if let Some((removed, added)) = diff.change() { - let latest = if !possibilities.is_empty() { - possibilities - .iter() - .map(|s| s.as_summary()) - .filter(|s| is_latest(s.version(), added.version())) - .map(|s| s.version().clone()) - .max() - .map(format_latest) - } else { - None - } - 
.unwrap_or_default(); - - let msg = if removed.source_id().is_git() { - format!( - "{removed} -> #{}", - &added.source_id().precise_git_fragment().unwrap()[..8], - ) - } else { - format!("{removed} -> v{}{latest}", added.version()) - }; - - // If versions differ only in build metadata, we call it an "update" - // regardless of whether the build metadata has gone up or down. - // This metadata is often stuff like git commit hashes, which are - // not meaningfully ordered. - if removed.version().cmp_precedence(added.version()) == Ordering::Greater { - ws.gctx() - .shell() - .status_with_color("Downgrading", msg, &style::WARN)?; - } else { - ws.gctx() - .shell() - .status_with_color("Updating", msg, &style::GOOD)?; - } - } else { - for package in diff.removed.iter() { - ws.gctx().shell().status_with_color( - "Removing", - format!("{package}"), - &style::ERROR, - )?; - } - for package in diff.added.iter() { - let latest = if !possibilities.is_empty() { - possibilities - .iter() - .map(|s| s.as_summary()) - .filter(|s| is_latest(s.version(), package.version())) - .map(|s| s.version().clone()) - .max() - .map(format_latest) - } else { - None - } - .unwrap_or_default(); - - ws.gctx().shell().status_with_color( - "Adding", - format!("{package}{latest}"), - &style::NOTE, - )?; - } - } - for package in &diff.unchanged { - let latest = if !possibilities.is_empty() { - possibilities - .iter() - .map(|s| s.as_summary()) - .filter(|s| is_latest(s.version(), package.version())) - .map(|s| s.version().clone()) - .max() - .map(format_latest) - } else { - None - }; - - if let Some(latest) = latest { - unchanged_behind += 1; - if ws.gctx().shell().verbosity() == Verbosity::Verbose { - ws.gctx().shell().status_with_color( - "Unchanged", - format!("{package}{latest}"), - &anstyle::Style::new().bold(), - )?; - } - } - } - } - - if ws.gctx().shell().verbosity() == Verbosity::Verbose { - ws.gctx().shell().note( - "to see how you depend on a package, run `cargo tree --invert --package @`", 
- )?; - } else { - if 0 < unchanged_behind { - ws.gctx().shell().note(format!( - "pass `--verbose` to see {unchanged_behind} unchanged dependencies behind latest" - ))?; - } - } - - Ok(()) -} - -fn status_locking(ws: &Workspace<'_>, num_pkgs: usize) -> CargoResult<()> { - use std::fmt::Write as _; - - let plural = if num_pkgs == 1 { "" } else { "s" }; - - let mut cfg = String::new(); - // Don't have a good way to describe `direct_minimal_versions` atm - if !ws.gctx().cli_unstable().direct_minimal_versions { - write!(&mut cfg, " to")?; - if ws.gctx().cli_unstable().minimal_versions { - write!(&mut cfg, " earliest")?; - } else { - write!(&mut cfg, " latest")?; - } - - if ws.resolve_honors_rust_version() { - let rust_version = if let Some(ver) = ws.rust_version() { - ver.clone().into_partial() - } else { - let rustc = ws.gctx().load_global_rustc(Some(ws))?; - let rustc_version = rustc.version.clone().into(); - rustc_version - }; - write!(&mut cfg, " Rust {rust_version}")?; - } - write!(&mut cfg, " compatible version{plural}")?; - } - - ws.gctx() - .shell() - .status("Locking", format!("{num_pkgs} package{plural}{cfg}"))?; - Ok(()) -} - -fn is_latest(candidate: &semver::Version, current: &semver::Version) -> bool { - current < candidate - // Only match pre-release if major.minor.patch are the same - && (candidate.pre.is_empty() - || (candidate.major == current.major - && candidate.minor == current.minor - && candidate.patch == current.patch)) -} - -fn fill_with_deps<'a>( - resolve: &'a Resolve, - dep: PackageId, - set: &mut HashSet, - visited: &mut HashSet, -) { - if !visited.insert(dep) { - return; - } - set.insert(dep); - for (dep, _) in resolve.deps_not_replaced(dep) { - fill_with_deps(resolve, dep, set, visited); - } -} - -/// All resolved versions of a package name within a [`SourceId`] -#[derive(Default, Clone, Debug)] -pub struct PackageDiff { - removed: Vec, - added: Vec, - unchanged: Vec, -} - -impl PackageDiff { - pub fn new(resolve: &Resolve) -> Vec { - let 
mut changes = BTreeMap::new(); - let empty = Self::default(); - for dep in resolve.iter() { - changes - .entry(Self::key(dep)) - .or_insert_with(|| empty.clone()) - .added - .push(dep); - } - - changes.into_iter().map(|(_, v)| v).collect() - } - - pub fn diff(previous_resolve: &Resolve, resolve: &Resolve) -> Vec { - fn vec_subset(a: &[PackageId], b: &[PackageId]) -> Vec { - a.iter().filter(|a| !contains_id(b, a)).cloned().collect() - } - - fn vec_intersection(a: &[PackageId], b: &[PackageId]) -> Vec { - a.iter().filter(|a| contains_id(b, a)).cloned().collect() - } - - // Check if a PackageId is present `b` from `a`. - // - // Note that this is somewhat more complicated because the equality for source IDs does not - // take precise versions into account (e.g., git shas), but we want to take that into - // account here. - fn contains_id(haystack: &[PackageId], needle: &PackageId) -> bool { - let Ok(i) = haystack.binary_search(needle) else { - return false; - }; - - // If we've found `a` in `b`, then we iterate over all instances - // (we know `b` is sorted) and see if they all have different - // precise versions. If so, then `a` isn't actually in `b` so - // we'll let it through. - // - // Note that we only check this for non-registry sources, - // however, as registries contain enough version information in - // the package ID to disambiguate. - if needle.source_id().is_registry() { - return true; - } - haystack[i..] - .iter() - .take_while(|b| &needle == b) - .any(|b| needle.source_id().has_same_precise_as(b.source_id())) - } - - // Map `(package name, package source)` to `(removed versions, added versions)`. 
- let mut changes = BTreeMap::new(); - let empty = Self::default(); - for dep in previous_resolve.iter() { - changes - .entry(Self::key(dep)) - .or_insert_with(|| empty.clone()) - .removed - .push(dep); - } - for dep in resolve.iter() { - changes - .entry(Self::key(dep)) - .or_insert_with(|| empty.clone()) - .added - .push(dep); - } - - for v in changes.values_mut() { - let Self { - removed: ref mut old, - added: ref mut new, - unchanged: ref mut other, - } = *v; - old.sort(); - new.sort(); - let removed = vec_subset(old, new); - let added = vec_subset(new, old); - let unchanged = vec_intersection(new, old); - *old = removed; - *new = added; - *other = unchanged; - } - debug!("{:#?}", changes); - - changes.into_iter().map(|(_, v)| v).collect() - } - - fn key(dep: PackageId) -> (&'static str, SourceId) { - (dep.name().as_str(), dep.source_id()) - } - - /// Guess if a package upgraded/downgraded - /// - /// All `PackageDiff` knows is that entries were added/removed within [`Resolve`]. - /// A package could be added or removed because of dependencies from other packages - /// which makes it hard to definitively say "X was upgrade to N". 
- pub fn change(&self) -> Option<(&PackageId, &PackageId)> { - if self.removed.len() == 1 && self.added.len() == 1 { - Some((&self.removed[0], &self.added[0])) - } else { - None - } - } - - /// For querying [`PackageRegistry`] for alternative versions to report to the user - pub fn alternatives_query(&self) -> Option { - let package_id = [ - self.added.iter(), - self.unchanged.iter(), - self.removed.iter(), - ] - .into_iter() - .flatten() - .next() - // Limit to registry as that is the only source with meaningful alternative versions - .filter(|s| s.source_id().is_registry())?; - let query = crate::core::dependency::Dependency::parse( - package_id.name(), - None, - package_id.source_id(), - ) - .expect("already a valid dependency"); - Some(query) - } -} diff --git a/src/cargo/ops/cargo_install.rs b/src/cargo/ops/cargo_install.rs index a662e891f38..74583deebcb 100644 --- a/src/cargo/ops/cargo_install.rs +++ b/src/cargo/ops/cargo_install.rs @@ -46,7 +46,6 @@ struct InstallablePackage<'gctx> { vers: Option, force: bool, no_track: bool, - pkg: Package, ws: Workspace<'gctx>, rustc: Rustc, @@ -68,6 +67,7 @@ impl<'gctx> InstallablePackage<'gctx> { no_track: bool, needs_update_if_source_is_index: bool, current_rust_version: Option<&PartialVersion>, + lockfile_path: Option<&Path>, ) -> CargoResult> { if let Some(name) = krate { if name == "." { @@ -141,7 +141,7 @@ impl<'gctx> InstallablePackage<'gctx> { select_pkg( &mut src, dep, - |path: &mut PathSource<'_>| path.read_packages(), + |path: &mut PathSource<'_>| path.root_package().map(|p| vec![p]), gctx, current_rust_version, )? 
@@ -155,6 +155,7 @@ impl<'gctx> InstallablePackage<'gctx> { &root, &dst, force, + lockfile_path, ) { let msg = format!( "package `{}` is already installed, use --force to override", @@ -179,15 +180,32 @@ impl<'gctx> InstallablePackage<'gctx> { } }; - let (ws, rustc, target) = - make_ws_rustc_target(gctx, &original_opts, &source_id, pkg.clone())?; - // If we're installing in --locked mode and there's no `Cargo.lock` published - // ie. the bin was published before https://github.com/rust-lang/cargo/pull/7026 - if gctx.locked() && !ws.root().join("Cargo.lock").exists() { - gctx.shell().warn(format!( - "no Cargo.lock file published in {}", - pkg.to_string() - ))?; + let (ws, rustc, target) = make_ws_rustc_target( + gctx, + &original_opts, + &source_id, + pkg.clone(), + lockfile_path.clone(), + )?; + + if gctx.locked() { + // When --lockfile-path is set, check that passed lock file exists + // (unlike the usual flag behavior, lockfile won't be created as we imply --locked) + if let Some(requested_lockfile_path) = ws.requested_lockfile_path() { + if !requested_lockfile_path.is_file() { + bail!( + "no Cargo.lock file found in the requested path {}", + requested_lockfile_path.display() + ); + } + // If we're installing in --locked mode and there's no `Cargo.lock` published + // ie. 
the bin was published before https://github.com/rust-lang/cargo/pull/7026 + } else if !ws.root().join("Cargo.lock").exists() { + gctx.shell().warn(format!( + "no Cargo.lock file published in {}", + pkg.to_string() + ))?; + } } let pkg = if source_id.is_git() { // Don't use ws.current() in order to keep the package source as a git source so that @@ -246,7 +264,6 @@ impl<'gctx> InstallablePackage<'gctx> { vers: vers.cloned(), force, no_track, - pkg, ws, rustc, @@ -297,7 +314,7 @@ impl<'gctx> InstallablePackage<'gctx> { Ok(duplicates) } - fn install_one(mut self) -> CargoResult { + fn install_one(mut self, dry_run: bool) -> CargoResult { self.gctx.shell().status("Installing", &self.pkg)?; let dst = self.root.join("bin").into_path_unlocked(); @@ -321,6 +338,7 @@ impl<'gctx> InstallablePackage<'gctx> { self.check_yanked_install()?; let exec: Arc = Arc::new(DefaultExecutor); + self.opts.build_config.dry_run = dry_run; let compile = ops::compile_ws(&self.ws, &self.opts, &exec).with_context(|| { if let Some(td) = td_opt.take() { // preserve the temporary directory, so the user can inspect it @@ -419,13 +437,15 @@ impl<'gctx> InstallablePackage<'gctx> { let staging_dir = TempFileBuilder::new() .prefix("cargo-install") .tempdir_in(&dst)?; - for &(bin, src) in binaries.iter() { - let dst = staging_dir.path().join(bin); - // Try to move if `target_dir` is transient. - if !self.source_id.is_path() && fs::rename(src, &dst).is_ok() { - continue; + if !dry_run { + for &(bin, src) in binaries.iter() { + let dst = staging_dir.path().join(bin); + // Try to move if `target_dir` is transient. 
+ if !self.source_id.is_path() && fs::rename(src, &dst).is_ok() { + continue; + } + paths::copy(src, &dst)?; } - paths::copy(src, &dst)?; } let (to_replace, to_install): (Vec<&str>, Vec<&str>) = binaries @@ -441,11 +461,13 @@ impl<'gctx> InstallablePackage<'gctx> { let src = staging_dir.path().join(bin); let dst = dst.join(bin); self.gctx.shell().status("Installing", dst.display())?; - fs::rename(&src, &dst).with_context(|| { - format!("failed to move `{}` to `{}`", src.display(), dst.display()) - })?; - installed.bins.push(dst); - successful_bins.insert(bin.to_string()); + if !dry_run { + fs::rename(&src, &dst).with_context(|| { + format!("failed to move `{}` to `{}`", src.display(), dst.display()) + })?; + installed.bins.push(dst); + successful_bins.insert(bin.to_string()); + } } // Repeat for binaries which replace existing ones but don't pop the error @@ -456,10 +478,12 @@ impl<'gctx> InstallablePackage<'gctx> { let src = staging_dir.path().join(bin); let dst = dst.join(bin); self.gctx.shell().status("Replacing", dst.display())?; - fs::rename(&src, &dst).with_context(|| { - format!("failed to move `{}` to `{}`", src.display(), dst.display()) - })?; - successful_bins.insert(bin.to_string()); + if !dry_run { + fs::rename(&src, &dst).with_context(|| { + format!("failed to move `{}` to `{}`", src.display(), dst.display()) + })?; + successful_bins.insert(bin.to_string()); + } } Ok(()) }; @@ -476,9 +500,14 @@ impl<'gctx> InstallablePackage<'gctx> { &self.rustc.verbose_version, ); - if let Err(e) = - remove_orphaned_bins(&self.ws, &mut tracker, &duplicates, &self.pkg, &dst) - { + if let Err(e) = remove_orphaned_bins( + &self.ws, + &mut tracker, + &duplicates, + &self.pkg, + &dst, + dry_run, + ) { // Don't hard error on remove. 
self.gctx .shell() @@ -515,7 +544,10 @@ impl<'gctx> InstallablePackage<'gctx> { } } - if duplicates.is_empty() { + if dry_run { + self.gctx.shell().warn("aborting install due to dry run")?; + Ok(true) + } else if duplicates.is_empty() { self.gctx.shell().status( "Installed", format!( @@ -561,7 +593,8 @@ impl<'gctx> InstallablePackage<'gctx> { // It would be best if `source` could be passed in here to avoid a // duplicate "Updating", but since `source` is taken by value, then it // wouldn't be available for `compile_ws`. - let (pkg_set, resolve) = ops::resolve_ws(&self.ws)?; + let dry_run = false; + let (pkg_set, resolve) = ops::resolve_ws(&self.ws, dry_run)?; ops::check_yanked( self.ws.gctx(), &pkg_set, @@ -619,6 +652,8 @@ pub fn install( opts: &ops::CompileOptions, force: bool, no_track: bool, + dry_run: bool, + lockfile_path: Option<&Path>, ) -> CargoResult<()> { let root = resolve_root(root, gctx)?; let dst = root.join("bin").into_path_unlocked(); @@ -650,10 +685,11 @@ pub fn install( no_track, true, current_rust_version.as_ref(), + lockfile_path, )?; let mut installed_anything = true; if let Some(installable_pkg) = installable_pkg { - installed_anything = installable_pkg.install_one()?; + installed_anything = installable_pkg.install_one(dry_run)?; } (installed_anything, false) } else { @@ -681,6 +717,7 @@ pub fn install( no_track, !did_update, current_rust_version.as_ref(), + lockfile_path, ) { Ok(Some(installable_pkg)) => { did_update = true; @@ -704,7 +741,7 @@ pub fn install( let install_results: Vec<_> = pkgs_to_install .into_iter() - .map(|(krate, installable_pkg)| (krate, installable_pkg.install_one())) + .map(|(krate, installable_pkg)| (krate, installable_pkg.install_one(dry_run))) .collect(); for (krate, result) in install_results { @@ -787,6 +824,7 @@ fn installed_exact_package( root: &Filesystem, dst: &Path, force: bool, + lockfile_path: Option<&Path>, ) -> CargoResult> where T: Source, @@ -802,7 +840,7 @@ where // best-effort check to see if we can 
avoid hitting the network. if let Ok(pkg) = select_dep_pkg(source, dep, gctx, false, None) { let (_ws, rustc, target) = - make_ws_rustc_target(gctx, opts, &source.source_id(), pkg.clone())?; + make_ws_rustc_target(gctx, opts, &source.source_id(), pkg.clone(), lockfile_path)?; if let Ok(true) = is_installed(&pkg, gctx, opts, &rustc, &target, root, dst, force) { return Ok(Some(pkg)); } @@ -815,6 +853,7 @@ fn make_ws_rustc_target<'gctx>( opts: &ops::CompileOptions, source_id: &SourceId, pkg: Package, + lockfile_path: Option<&Path>, ) -> CargoResult<(Workspace<'gctx>, Rustc, String)> { let mut ws = if source_id.is_git() || source_id.is_path() { Workspace::new(pkg.manifest_path(), gctx)? @@ -824,6 +863,11 @@ fn make_ws_rustc_target<'gctx>( ws }; ws.set_ignore_lock(gctx.lock_update_allowed()); + ws.set_requested_lockfile_path(lockfile_path.map(|p| p.to_path_buf())); + // if --lockfile-path is set, imply --locked + if ws.requested_lockfile_path().is_some() { + ws.set_ignore_lock(false); + } ws.set_require_optional_deps(false); let rustc = gctx.load_global_rustc(Some(&ws))?; @@ -856,6 +900,7 @@ fn remove_orphaned_bins( duplicates: &BTreeMap>, pkg: &Package, dst: &Path, + dry_run: bool, ) -> CargoResult<()> { let filter = ops::CompileFilter::new_all_targets(); let all_self_names = exe_names(pkg, &filter); @@ -893,8 +938,10 @@ fn remove_orphaned_bins( old_pkg ), )?; - paths::remove_file(&full_path) - .with_context(|| format!("failed to remove {:?}", full_path))?; + if !dry_run { + paths::remove_file(&full_path) + .with_context(|| format!("failed to remove {:?}", full_path))?; + } } } } diff --git a/src/cargo/ops/cargo_new.rs b/src/cargo/ops/cargo_new.rs index f349395b55f..a3d08bb4db0 100644 --- a/src/cargo/ops/cargo_new.rs +++ b/src/cargo/ops/cargo_new.rs @@ -802,7 +802,7 @@ fn mk(gctx: &GlobalContext, opts: &MkOptions<'_>) -> CargoResult<()> { } } - let manifest_path = path.join("Cargo.toml"); + let manifest_path = paths::normalize_path(&path.join("Cargo.toml")); if let 
Ok(root_manifest_path) = find_root_manifest_for_wd(&manifest_path) { let root_manifest = paths::read(&root_manifest_path)?; // Sometimes the root manifest is not a valid manifest, so we only try to parse it if it is. @@ -876,7 +876,7 @@ fn main() { " } else { b"\ -pub fn add(left: usize, right: usize) -> usize { +pub fn add(left: u64, right: u64) -> u64 { left + right } @@ -906,7 +906,7 @@ mod tests { } } - if let Err(e) = Workspace::new(&path.join("Cargo.toml"), gctx) { + if let Err(e) = Workspace::new(&manifest_path, gctx) { crate::display_warning_with_error( "compiling this new package may not work due to invalid \ workspace configuration", @@ -971,38 +971,40 @@ fn update_manifest_with_new_member( workspace_document: &mut toml_edit::DocumentMut, display_path: &str, ) -> CargoResult { + let Some(workspace) = workspace_document.get_mut("workspace") else { + return Ok(false); + }; + // If the members element already exist, check if one of the patterns // in the array already includes the new package's relative path. // - Add the relative path if the members don't match the new package's path. // - Create a new members array if there are no members element in the workspace yet. 
- if let Some(workspace) = workspace_document.get_mut("workspace") { - if let Some(members) = workspace - .get_mut("members") - .and_then(|members| members.as_array_mut()) - { - for member in members.iter() { - let pat = member - .as_str() - .with_context(|| format!("invalid non-string member `{}`", member))?; - let pattern = glob::Pattern::new(pat) - .with_context(|| format!("cannot build glob pattern from `{}`", pat))?; - - if pattern.matches(&display_path) { - return Ok(false); - } - } + if let Some(members) = workspace + .get_mut("members") + .and_then(|members| members.as_array_mut()) + { + for member in members.iter() { + let pat = member + .as_str() + .with_context(|| format!("invalid non-string member `{}`", member))?; + let pattern = glob::Pattern::new(pat) + .with_context(|| format!("cannot build glob pattern from `{}`", pat))?; - let was_sorted = is_sorted(members.iter().map(Value::as_str)); - members.push(display_path); - if was_sorted { - members.sort_by(|lhs, rhs| lhs.as_str().cmp(&rhs.as_str())); + if pattern.matches(&display_path) { + return Ok(false); } - } else { - let mut array = Array::new(); - array.push(display_path); + } - workspace["members"] = toml_edit::value(array); + let was_sorted = is_sorted(members.iter().map(Value::as_str)); + members.push(display_path); + if was_sorted { + members.sort_by(|lhs, rhs| lhs.as_str().cmp(&rhs.as_str())); } + } else { + let mut array = Array::new(); + array.push(display_path); + + workspace["members"] = toml_edit::value(array); } write_atomic( diff --git a/src/cargo/ops/cargo_output_metadata.rs b/src/cargo/ops/cargo_output_metadata.rs index 408be75faf3..246636d1f60 100644 --- a/src/cargo/ops/cargo_output_metadata.rs +++ b/src/cargo/ops/cargo_output_metadata.rs @@ -33,7 +33,10 @@ pub fn output_metadata(ws: &Workspace<'_>, opt: &OutputMetadataOptions) -> Cargo ); } let (packages, resolve) = if opt.no_deps { - let packages = ws.members().map(|pkg| pkg.serialized()).collect(); + let packages = ws + .members() 
+ .map(|pkg| pkg.serialized(ws.gctx().cli_unstable(), ws.unstable_features())) + .collect(); (packages, None) } else { let (packages, resolve) = build_resolve_graph(ws, opt)?; @@ -142,6 +145,7 @@ fn build_resolve_graph( // Note that even with --filter-platform we end up downloading host dependencies as well, // as that is the behavior of download_accessible. + let dry_run = false; let ws_resolve = ops::resolve_ws_with_opts( ws, &mut target_data, @@ -150,6 +154,7 @@ fn build_resolve_graph( &specs, HasDevUnits::Yes, force_all, + dry_run, )?; let package_map: BTreeMap = ws_resolve @@ -176,7 +181,7 @@ fn build_resolve_graph( let actual_packages = package_map .into_iter() .filter_map(|(pkg_id, pkg)| node_map.get(&pkg_id).map(|_| pkg)) - .map(|pkg| pkg.serialized()) + .map(|pkg| pkg.serialized(ws.gctx().cli_unstable(), ws.unstable_features())) .collect(); let mr = MetadataResolve { diff --git a/src/cargo/ops/cargo_package.rs b/src/cargo/ops/cargo_package.rs index 7a7474825d1..239e5af2afb 100644 --- a/src/cargo/ops/cargo_package.rs +++ b/src/cargo/ops/cargo_package.rs @@ -7,19 +7,25 @@ use std::sync::Arc; use std::task::Poll; use crate::core::compiler::{BuildConfig, CompileMode, DefaultExecutor, Executor}; +use crate::core::dependency::DepKind; use crate::core::manifest::Target; use crate::core::resolver::CliFeatures; -use crate::core::{registry::PackageRegistry, resolver::HasDevUnits}; +use crate::core::resolver::HasDevUnits; use crate::core::{Feature, PackageIdSpecQuery, Shell, Verbosity, Workspace}; use crate::core::{Package, PackageId, PackageSet, Resolve, SourceId}; -use crate::sources::PathSource; +use crate::ops::lockfile::LOCKFILE_NAME; +use crate::ops::registry::{infer_registry, RegistryOrIndex}; +use crate::sources::registry::index::{IndexPackage, RegistryDependency}; +use crate::sources::{PathSource, CRATES_IO_REGISTRY}; use crate::util::cache_lock::CacheLockMode; use crate::util::context::JobsConfig; use crate::util::errors::CargoResult; use 
crate::util::toml::prepare_for_publish; -use crate::util::{self, human_readable_bytes, restricted_names, FileLock, GlobalContext}; +use crate::util::{ + self, human_readable_bytes, restricted_names, FileLock, Filesystem, GlobalContext, Graph, +}; use crate::{drop_println, ops}; -use anyhow::Context as _; +use anyhow::{bail, Context as _}; use cargo_util::paths; use flate2::read::GzDecoder; use flate2::{Compression, GzBuilder}; @@ -28,6 +34,7 @@ use tar::{Archive, Builder, EntryType, Header, HeaderMode}; use tracing::debug; use unicase::Ascii as UncasedAscii; +#[derive(Clone)] pub struct PackageOpts<'gctx> { pub gctx: &'gctx GlobalContext, pub list: bool, @@ -39,6 +46,7 @@ pub struct PackageOpts<'gctx> { pub to_package: ops::Packages, pub targets: Vec, pub cli_features: CliFeatures, + pub reg_or_index: Option, } const ORIGINAL_MANIFEST_FILE: &str = "Cargo.toml.orig"; @@ -80,50 +88,21 @@ struct VcsInfo { #[derive(Serialize)] struct GitVcsInfo { sha1: String, + /// Indicate whether or not the Git worktree is dirty. + #[serde(skip_serializing_if = "std::ops::Not::not")] + dirty: bool, } -pub fn package_one( +// Builds a tarball and places it in the output directory. +fn create_package( ws: &Workspace<'_>, pkg: &Package, - opts: &PackageOpts<'_>, -) -> CargoResult> { + ar_files: Vec, + local_reg: Option<&TmpRegistry<'_>>, +) -> CargoResult { let gctx = ws.gctx(); - let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), gctx); - src.update()?; - - if opts.check_metadata { - check_metadata(pkg, gctx)?; - } - - if !pkg.manifest().exclude().is_empty() && !pkg.manifest().include().is_empty() { - gctx.shell().warn( - "both package.include and package.exclude are specified; \ - the exclude list will be ignored", - )?; - } - let src_files = src.list_files(pkg)?; - - // Check (git) repository state, getting the current commit hash if not - // dirty. - let vcs_info = if !opts.allow_dirty { - // This will error if a dirty repo is found. 
- check_repo_state(pkg, &src_files, gctx)? - } else { - None - }; - - let ar_files = build_ar_list(ws, pkg, src_files, vcs_info)?; - let filecount = ar_files.len(); - if opts.list { - for ar_file in ar_files { - drop_println!(gctx, "{}", ar_file.rel_str); - } - - return Ok(None); - } - // Check that the package dependencies are safe to deploy. for dep in pkg.dependencies() { super::check_dep_has_version(dep, false)?; @@ -143,18 +122,14 @@ pub fn package_one( gctx.shell() .status("Packaging", pkg.package_id().to_string())?; dst.file().set_len(0)?; - let uncompressed_size = tar(ws, pkg, ar_files, dst.file(), &filename) - .with_context(|| "failed to prepare local package for uploading")?; - if opts.verify { - dst.seek(SeekFrom::Start(0))?; - run_verify(ws, pkg, &dst, opts).with_context(|| "failed to verify package tarball")? - } + let uncompressed_size = tar(ws, pkg, local_reg, ar_files, dst.file(), &filename) + .context("failed to prepare local package for uploading")?; dst.seek(SeekFrom::Start(0))?; let src_path = dst.path(); let dst_path = dst.parent().join(&filename); fs::rename(&src_path, &dst_path) - .with_context(|| "failed to move temporary tarball into final location")?; + .context("failed to move temporary tarball into final location")?; let dst_metadata = dst .file() @@ -172,10 +147,14 @@ pub fn package_one( // It doesn't really matter if this fails. drop(gctx.shell().status("Packaged", message)); - return Ok(Some(dst)); + return Ok(dst); } -pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult>> { +/// Packages an entire workspace. +/// +/// Returns the generated package files. If `opts.list` is true, skips +/// generating package files and returns an empty list. 
+pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult> { let specs = &opts.to_package.to_package_id_specs(ws)?; // If -p is used, we should check spec is matched with the members (See #13719) if let ops::Packages::Packages(_) = opts.to_package { @@ -184,46 +163,240 @@ pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult