
Commit c43aafc

uv-resolver: add filtering step after forks are combined
While we do filter based on Python markers during forking, it turns out that the markers for each fork are "combined" *after* this filtering step. But the process of combination can result in a more specific marker that is always false for the configured Python requirement. This could result in dependencies with markers that are always false (like `python_version < '0'`) appearing in the resolution. I think it would probably be better to do this check during forking, but that's a bit of a bigger refactor.
1 parent bac074d commit c43aafc
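
To make the failure mode concrete, here is a minimal, self-contained Rust sketch of the idea behind the fix. It is not uv's real API: `VersionRange` and the free function `with_markers` below are hypothetical stand-ins for `MarkerTree`, `PythonRequirement`, and `ForkState::with_markers`, modelling a marker as a half-open `python_version` range. The point it illustrates is the one in the diff: combining a fork's marker with a preference split can yield a marker that is disjoint from the configured Python requirement, and such forks should be dropped (by returning `None`) rather than surfacing dependency entries guarded by always-false markers like `python_version < '0'`.

```rust
// A minimal sketch of the new filtering step, under a simplified model of
// markers. Real uv operates on `MarkerTree` / `PythonRequirement`; here a
// marker is just a half-open `python_version` range so the example is
// self-contained and runnable.

/// Hypothetical stand-in for a marker expression: the `python_version`
/// interval `[lo, hi)` on which the marker evaluates to true.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct VersionRange {
    lo: (u32, u32),
    hi: (u32, u32),
}

impl VersionRange {
    /// `marker_a and marker_b`: intersect the two ranges.
    fn and(self, other: Self) -> Self {
        Self {
            lo: self.lo.max(other.lo),
            hi: self.hi.min(other.hi),
        }
    }

    /// An always-false marker covers no versions at all.
    fn is_empty(self) -> bool {
        self.lo >= self.hi
    }

    /// Analogue of `MarkerTree::is_disjoint`: no version satisfies both.
    fn is_disjoint(self, other: Self) -> bool {
        self.and(other).is_empty()
    }
}

/// Analogue of `ForkState::with_markers` after this commit: combine the fork's
/// markers with the new split and drop the fork (return `None`) when the
/// result can never hold under the configured Python requirement.
fn with_markers(
    current: VersionRange,
    split: VersionRange,
    python_requirement: VersionRange,
) -> Option<VersionRange> {
    let combined = current.and(split);
    if combined.is_disjoint(python_requirement) {
        // Before this commit the fork survived, and its dependencies showed up
        // in the output with impossible markers such as `python_version < '0'`.
        return None;
    }
    Some(combined)
}

fn main() {
    // Hypothetical project: requires-python = ">=3.12".
    let python_requirement = VersionRange { lo: (3, 12), hi: (99, 0) };
    // A fork created for `python_version < '3.9'` ...
    let fork = VersionRange { lo: (0, 0), hi: (3, 9) };
    // ... combined with a preference split for `python_version >= '3.12'`.
    let split = VersionRange { lo: (3, 12), hi: (99, 0) };

    // The combined marker is always false, so the split is filtered out.
    assert!(with_markers(fork, split, python_requirement).is_none());

    // A compatible split is kept, narrowed to the intersection.
    let split_ok = VersionRange { lo: (3, 12), hi: (3, 13) };
    assert_eq!(
        with_markers(python_requirement, split_ok, python_requirement),
        Some(split_ok)
    );
}
```

Returning `Option` is also why the call sites in the diff below switch from `.map` to `.filter_map`: impossible splits are discarded instead of being carried through the resolution.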

File tree: 2 files changed, +19 −17 lines


crates/uv-resolver/src/resolver/mod.rs

Lines changed: 18 additions & 5 deletions
```diff
@@ -316,7 +316,9 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
                     fork_preferences
                         .iter()
                         .rev()
-                        .map(|fork_preference| state.clone().with_markers(fork_preference.clone()))
+                        .filter_map(|fork_preference| {
+                            state.clone().with_markers(fork_preference.clone())
+                        })
                         .collect()
                 }
             } else {
@@ -693,14 +695,17 @@ impl<InstalledPackages: InstalledPackagesProvider> ResolverState<InstalledPackag
                 forks
                     .into_iter()
                     .enumerate()
-                    .map(move |(i, fork)| {
+                    .filter_map(move |(i, fork)| {
                         let is_last = i == forks_len - 1;
                         let forked_state = cur_state.take().unwrap();
                         if !is_last {
                             cur_state = Some(forked_state.clone());
                         }

-                        let mut forked_state = forked_state.with_markers(fork.markers);
+                        let markers = fork.markers.clone();
+                        Some((fork, forked_state.with_markers(markers)?))
+                    })
+                    .map(move |(fork, mut forked_state)| {
                         forked_state.add_package_version_dependencies(
                             for_package,
                             version,
@@ -2320,8 +2325,16 @@ impl ForkState {

     /// Subset the current markers with the new markers and update the python requirements fields
     /// accordingly.
-    fn with_markers(mut self, markers: MarkerTree) -> Self {
+    fn with_markers(mut self, markers: MarkerTree) -> Option<Self> {
         let combined_markers = self.markers.and(markers);
+        let python_marker = self.python_requirement.to_marker_tree();
+        if combined_markers.is_disjoint(&python_marker) {
+            debug!(
+                "Skipping split {combined_markers:?} \
+                 because of Python requirement {python_marker:?}",
+            );
+            return None;
+        }

         // If the fork contains a narrowed Python requirement, apply it.
         let python_requirement = marker::requires_python(&combined_markers)
@@ -2335,7 +2348,7 @@ impl ForkState {
         }

         self.markers = ResolverMarkers::Fork(combined_markers);
-        self
+        Some(self)
     }

     fn into_resolution(self) -> Resolution {
```

crates/uv/tests/it/pip_compile.rs

Lines changed: 1 addition & 12 deletions
```diff
@@ -484,14 +484,10 @@ typing_extensions==4.3.0
 apache-airflow==2.3.4 # via apache-airflow-providers-microsoft-azure, -r requirements.in
 apache-airflow-providers-common-sql==1.4.0 # via apache-airflow-providers-sqlite
 apache-airflow-providers-ftp==3.3.1 # via apache-airflow
-apache-airflow-providers-ftp==3.7.0 ; python_version < '0' # via apache-airflow
 apache-airflow-providers-http==4.3.0 # via apache-airflow
-apache-airflow-providers-http==4.10.0 ; python_version < '0' # via apache-airflow
 apache-airflow-providers-imap==3.1.1 # via apache-airflow
-apache-airflow-providers-imap==3.5.0 ; python_version < '0' # via apache-airflow
 apache-airflow-providers-microsoft-azure==4.2.0 # via apache-airflow, -c constraints.txt
 apache-airflow-providers-sqlite==3.3.2 # via apache-airflow
-apache-airflow-providers-sqlite==3.7.1 ; python_version < '0' # via apache-airflow
 apispec==3.3.2 # via flask-appbuilder
 argcomplete==3.2.3 # via apache-airflow
 asgiref==3.8.1 # via apache-airflow-providers-http, connexion, flask
@@ -501,13 +497,10 @@ typing_extensions==4.3.0
 azure-batch==14.1.0 # via apache-airflow-providers-microsoft-azure
 azure-common==1.1.28 # via azure-batch, azure-keyvault-secrets, azure-mgmt-containerinstance, azure-mgmt-datafactory, azure-mgmt-datalake-store, azure-mgmt-resource, azure-storage-common, azure-storage-file
 azure-core==1.29.1 # via azure-cosmos, azure-identity, azure-keyvault-secrets, azure-mgmt-core, azure-servicebus, azure-storage-blob, msrest
-azure-core==1.30.1 ; python_version < '0' # via azure-identity
 azure-cosmos==4.5.1 # via apache-airflow-providers-microsoft-azure
-azure-cosmos==4.6.0 ; python_version < '0' # via apache-airflow-providers-microsoft-azure
 azure-datalake-store==0.0.53 # via apache-airflow-providers-microsoft-azure
 azure-identity==1.10.0 # via apache-airflow-providers-microsoft-azure, -c constraints.txt
 azure-keyvault-secrets==4.7.0 # via apache-airflow-providers-microsoft-azure
-azure-keyvault-secrets==4.8.0 ; python_version < '0' # via apache-airflow-providers-microsoft-azure
 azure-kusto-data==0.0.45 # via apache-airflow-providers-microsoft-azure
 azure-mgmt-containerinstance==1.5.0 # via apache-airflow-providers-microsoft-azure
 azure-mgmt-core==1.4.0 # via azure-mgmt-datafactory, azure-mgmt-resource
@@ -523,11 +516,9 @@ typing_extensions==4.3.0
 azure-storage-file==2.1.0 # via apache-airflow-providers-microsoft-azure
 babel==2.14.0 # via flask-babel
 black==22.12.0 # via -r requirements.in
-black==24.3.0 ; python_version < '0' # via -r requirements.in
 blinker==1.7.0 # via apache-airflow
 cachelib==0.9.0 # via flask-caching
 cattrs==23.1.2 # via apache-airflow
-cattrs==23.2.3 ; python_version < '0' # via apache-airflow
 certifi==2024.2.2 # via httpcore, httpx, msrest, requests
 cffi==1.16.0 # via azure-datalake-store, cryptography
 charset-normalizer==3.3.2 # via requests
@@ -571,7 +562,6 @@ typing_extensions==4.3.0
 jinja2==3.1.3 # via apache-airflow, connexion, flask, flask-babel, python-nvd3, swagger-ui-bundle
 joblib==1.3.2 # via scikit-learn
 jsonschema==4.17.3 # via apache-airflow, connexion, flask-appbuilder
-jsonschema==4.21.1 ; python_version < '0' # via apache-airflow, flask-appbuilder
 lazy-object-proxy==1.10.0 # via apache-airflow
 linkify-it-py==2.0.3 # via apache-airflow
 lockfile==0.12.2 # via apache-airflow, python-daemon
@@ -626,7 +616,6 @@ typing_extensions==4.3.0
 requests-oauthlib==2.0.0 # via msrest
 requests-toolbelt==1.0.0 # via apache-airflow-providers-http
 rich==13.3.1 # via apache-airflow
-rich==13.7.1 ; python_version < '0' # via apache-airflow
 scikit-learn==1.2.2 # via -r requirements.in
 scipy==1.12.0 # via scikit-learn
 setproctitle==1.3.3 # via apache-airflow
@@ -660,7 +649,7 @@ typing_extensions==4.3.0
 yarl==1.9.4 # via aiohttp

 ----- stderr -----
-Resolved 183 packages in [TIME]
+Resolved 172 packages in [TIME]
 "###
 );

```
