Skip to content

Commit cff4d7d

Browse files
abn, Secrus, and radoering
authored
repositories: add support for PEP 658 (#5509)
Co-authored-by: Bartosz Sokorski <[email protected]>
Co-authored-by: Randy Döring <[email protected]>
1 parent b49d2dd commit cff4d7d

16 files changed

+583
-54
lines changed

src/poetry/repositories/http_repository.py

+85-27
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,6 @@
33
import functools
44
import hashlib
55

6-
from collections import defaultdict
76
from contextlib import contextmanager
87
from pathlib import Path
98
from typing import TYPE_CHECKING
@@ -13,13 +12,14 @@
1312
import requests
1413
import requests.adapters
1514

15+
from packaging.metadata import parse_email
1616
from poetry.core.constraints.version import parse_constraint
1717
from poetry.core.packages.dependency import Dependency
18-
from poetry.core.packages.utils.link import Link
1918
from poetry.core.utils.helpers import temporary_directory
2019
from poetry.core.version.markers import parse_marker
2120

2221
from poetry.config.config import Config
22+
from poetry.inspection.info import PackageInfo
2323
from poetry.inspection.lazy_wheel import HTTPRangeRequestUnsupported
2424
from poetry.inspection.lazy_wheel import metadata_from_wheel_url
2525
from poetry.repositories.cached_repository import CachedRepository
@@ -36,8 +36,8 @@
3636

3737
if TYPE_CHECKING:
3838
from packaging.utils import NormalizedName
39+
from poetry.core.packages.utils.link import Link
3940

40-
from poetry.inspection.info import PackageInfo
4141
from poetry.repositories.link_sources.base import LinkSource
4242
from poetry.utils.authenticator import RepositoryCertificateConfig
4343

@@ -109,10 +109,9 @@ def _cached_or_downloaded_file(
109109
)
110110
yield filepath
111111

112-
def _get_info_from_wheel(self, url: str) -> PackageInfo:
112+
def _get_info_from_wheel(self, link: Link) -> PackageInfo:
113113
from poetry.inspection.info import PackageInfo
114114

115-
link = Link(url)
116115
netloc = link.netloc
117116

118117
# If "lazy-wheel" is enabled and the domain supports range requests
@@ -147,17 +146,68 @@ def _get_info_from_wheel(self, url: str) -> PackageInfo:
147146
level="debug",
148147
)
149148
self._supports_range_requests[netloc] = True
150-
return self._get_info_from_wheel(link.url)
149+
return self._get_info_from_wheel(link)
151150

152-
def _get_info_from_sdist(self, url: str) -> PackageInfo:
151+
def _get_info_from_sdist(self, link: Link) -> PackageInfo:
153152
from poetry.inspection.info import PackageInfo
154153

155-
with self._cached_or_downloaded_file(Link(url)) as filepath:
154+
with self._cached_or_downloaded_file(link) as filepath:
156155
return PackageInfo.from_sdist(filepath)
157156

158-
def _get_info_from_urls(self, urls: dict[str, list[str]]) -> PackageInfo:
157+
def _get_info_from_metadata(self, link: Link) -> PackageInfo | None:
158+
if link.has_metadata:
159+
try:
160+
assert link.metadata_url is not None
161+
response = self.session.get(link.metadata_url)
162+
if link.metadata_hashes and (
163+
hash_name := get_highest_priority_hash_type(
164+
set(link.metadata_hashes.keys()), f"{link.filename}.metadata"
165+
)
166+
):
167+
metadata_hash = getattr(hashlib, hash_name)(
168+
response.text.encode()
169+
).hexdigest()
170+
if metadata_hash != link.metadata_hashes[hash_name]:
171+
self._log(
172+
f"Metadata file hash ({metadata_hash}) does not match"
173+
f" expected hash ({link.metadata_hashes[hash_name]})."
174+
f" Metadata file for {link.filename} will be ignored.",
175+
level="warning",
176+
)
177+
return None
178+
179+
metadata, _ = parse_email(response.content)
180+
return PackageInfo.from_metadata(metadata)
181+
182+
except requests.HTTPError:
183+
self._log(
184+
f"Failed to retrieve metadata at {link.metadata_url}",
185+
level="warning",
186+
)
187+
188+
return None
189+
190+
def _get_info_from_links(
191+
self,
192+
links: list[Link],
193+
*,
194+
ignore_yanked: bool = True,
195+
) -> PackageInfo:
196+
# Sort links by distribution type
197+
wheels: list[Link] = []
198+
sdists: list[Link] = []
199+
for link in links:
200+
if link.yanked and ignore_yanked:
201+
# drop yanked files unless the entire release is yanked
202+
continue
203+
if link.is_wheel:
204+
wheels.append(link)
205+
elif link.filename.endswith(
206+
(".tar.gz", ".zip", ".bz2", ".xz", ".Z", ".tar")
207+
):
208+
sdists.append(link)
209+
159210
# Prefer to read data from wheels: this is faster and more reliable
160-
wheels = urls.get("bdist_wheel")
161211
if wheels:
162212
# We ought just to be able to look at any of the available wheels to read
163213
# metadata, they all should give the same answer.
@@ -173,8 +223,7 @@ def _get_info_from_urls(self, urls: dict[str, list[str]]) -> PackageInfo:
173223
universal_python3_wheel = None
174224
platform_specific_wheels = []
175225
for wheel in wheels:
176-
link = Link(wheel)
177-
m = wheel_file_re.match(link.filename)
226+
m = wheel_file_re.match(wheel.filename)
178227
if not m:
179228
continue
180229

@@ -194,13 +243,19 @@ def _get_info_from_urls(self, urls: dict[str, list[str]]) -> PackageInfo:
194243
platform_specific_wheels.append(wheel)
195244

196245
if universal_wheel is not None:
197-
return self._get_info_from_wheel(universal_wheel)
246+
return self._get_info_from_metadata(
247+
universal_wheel
248+
) or self._get_info_from_wheel(universal_wheel)
198249

199250
info = None
200251
if universal_python2_wheel and universal_python3_wheel:
201-
info = self._get_info_from_wheel(universal_python2_wheel)
252+
info = self._get_info_from_metadata(
253+
universal_python2_wheel
254+
) or self._get_info_from_wheel(universal_python2_wheel)
202255

203-
py3_info = self._get_info_from_wheel(universal_python3_wheel)
256+
py3_info = self._get_info_from_metadata(
257+
universal_python3_wheel
258+
) or self._get_info_from_wheel(universal_python3_wheel)
204259

205260
if info.requires_python or py3_info.requires_python:
206261
info.requires_python = str(
@@ -250,35 +305,37 @@ def _get_info_from_urls(self, urls: dict[str, list[str]]) -> PackageInfo:
250305

251306
# Prefer non platform specific wheels
252307
if universal_python3_wheel:
253-
return self._get_info_from_wheel(universal_python3_wheel)
308+
return self._get_info_from_metadata(
309+
universal_python3_wheel
310+
) or self._get_info_from_wheel(universal_python3_wheel)
254311

255312
if universal_python2_wheel:
256-
return self._get_info_from_wheel(universal_python2_wheel)
313+
return self._get_info_from_metadata(
314+
universal_python2_wheel
315+
) or self._get_info_from_wheel(universal_python2_wheel)
257316

258317
if platform_specific_wheels:
259318
first_wheel = platform_specific_wheels[0]
260-
return self._get_info_from_wheel(first_wheel)
319+
return self._get_info_from_metadata(
320+
first_wheel
321+
) or self._get_info_from_wheel(first_wheel)
261322

262-
return self._get_info_from_sdist(urls["sdist"][0])
323+
return self._get_info_from_metadata(sdists[0]) or self._get_info_from_sdist(
324+
sdists[0]
325+
)
263326

264327
def _links_to_data(self, links: list[Link], data: PackageInfo) -> dict[str, Any]:
265328
if not links:
266329
raise PackageNotFound(
267330
f'No valid distribution links found for package: "{data.name}" version:'
268331
f' "{data.version}"'
269332
)
270-
urls = defaultdict(list)
333+
271334
files: list[dict[str, Any]] = []
272335
for link in links:
273336
if link.yanked and not data.yanked:
274337
# drop yanked files unless the entire release is yanked
275338
continue
276-
if link.is_wheel:
277-
urls["bdist_wheel"].append(link.url)
278-
elif link.filename.endswith(
279-
(".tar.gz", ".zip", ".bz2", ".xz", ".Z", ".tar")
280-
):
281-
urls["sdist"].append(link.url)
282339

283340
file_hash: str | None
284341
for hash_name in ("sha512", "sha384", "sha256"):
@@ -299,7 +356,8 @@ def _links_to_data(self, links: list[Link], data: PackageInfo) -> dict[str, Any]
299356

300357
data.files = files
301358

302-
info = self._get_info_from_urls(urls)
359+
# drop yanked files unless the entire release is yanked
360+
info = self._get_info_from_links(links, ignore_yanked=not data.yanked)
303361

304362
data.summary = info.summary
305363
data.requires_dist = info.requires_dist

src/poetry/repositories/link_sources/html.py

+15-1
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,21 @@ def _link_cache(self) -> LinkCache:
4242
yanked = unescape(yanked_value)
4343
else:
4444
yanked = "data-yanked" in anchor
45-
link = Link(url, requires_python=pyrequire, yanked=yanked)
45+
46+
# see https://peps.python.org/pep-0714/#clients
47+
# and https://peps.python.org/pep-0658/#specification
48+
metadata: str | bool
49+
for metadata_key in ("data-core-metadata", "data-dist-info-metadata"):
50+
metadata_value = anchor.get(metadata_key)
51+
if metadata_value:
52+
metadata = unescape(metadata_value)
53+
else:
54+
metadata = metadata_key in anchor
55+
if metadata:
56+
break
57+
link = Link(
58+
url, requires_python=pyrequire, yanked=yanked, metadata=metadata
59+
)
4660

4761
if link.ext not in self.SUPPORTED_FORMATS:
4862
continue

src/poetry/repositories/link_sources/json.py

+16-1
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,22 @@ def _link_cache(self) -> LinkCache:
2828
url = file["url"]
2929
requires_python = file.get("requires-python")
3030
yanked = file.get("yanked", False)
31-
link = Link(url, requires_python=requires_python, yanked=yanked)
31+
32+
# see https://peps.python.org/pep-0714/#clients
33+
# and https://peps.python.org/pep-0691/#project-detail
34+
metadata: dict[str, str] | bool = False
35+
for metadata_key in ("core-metadata", "dist-info-metadata"):
36+
if metadata_key in file:
37+
metadata_value = file[metadata_key]
38+
if metadata_value and isinstance(metadata_value, dict):
39+
metadata = metadata_value
40+
else:
41+
metadata = bool(metadata_value)
42+
break
43+
44+
link = Link(
45+
url, requires_python=requires_python, yanked=yanked, metadata=metadata
46+
)
3247

3348
if link.ext not in self.SUPPORTED_FORMATS:
3449
continue

src/poetry/repositories/pypi_repository.py

+9-17
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,6 @@
22

33
import logging
44

5-
from collections import defaultdict
65
from typing import TYPE_CHECKING
76
from typing import Any
87

@@ -162,25 +161,18 @@ def _get_release_info(
162161
data.files = files
163162

164163
if self._fallback and data.requires_dist is None:
165-
self._log("No dependencies found, downloading archives", level="debug")
164+
self._log(
165+
"No dependencies found, downloading metadata and/or archives",
166+
level="debug",
167+
)
166168
# No dependencies set (along with other information)
167169
# This might be due to actually no dependencies
168-
# or badly set metadata when uploading
170+
# or badly set metadata when uploading.
169171
# So, we need to make sure there is actually no
170-
# dependencies by introspecting packages
171-
urls = defaultdict(list)
172-
for url in json_data["urls"]:
173-
# Only get sdist and wheels if they exist
174-
dist_type = url["packagetype"]
175-
if dist_type not in SUPPORTED_PACKAGE_TYPES:
176-
continue
177-
178-
urls[dist_type].append(url["url"])
179-
180-
if not urls:
181-
return data.asdict()
182-
183-
info = self._get_info_from_urls(urls)
172+
# dependencies by introspecting packages.
173+
page = self.get_page(name)
174+
links = list(page.links_for_version(name, version))
175+
info = self._get_info_from_links(links)
184176

185177
data.requires_dist = info.requires_dist
186178

tests/repositories/conftest.py

+28
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,18 @@
11
from __future__ import annotations
22

3+
import posixpath
4+
5+
from pathlib import Path
36
from typing import TYPE_CHECKING
7+
from typing import Any
48

59
import pytest
10+
import requests
611

712

813
if TYPE_CHECKING:
914
from tests.types import HTMLPageGetter
15+
from tests.types import RequestsSessionGet
1016

1117

1218
@pytest.fixture
@@ -29,3 +35,25 @@ def _fixture(content: str, base_url: str | None = None) -> str:
2935
"""
3036

3137
return _fixture
38+
39+
40+
@pytest.fixture
41+
def get_metadata_mock() -> RequestsSessionGet:
42+
def metadata_mock(url: str, **__: Any) -> requests.Response:
43+
if url.endswith(".metadata"):
44+
response = requests.Response()
45+
response.encoding = "application/text"
46+
response._content = (
47+
(
48+
Path(__file__).parent
49+
/ "fixtures"
50+
/ "metadata"
51+
/ posixpath.basename(url)
52+
)
53+
.read_text()
54+
.encode()
55+
)
56+
return response
57+
raise requests.HTTPError()
58+
59+
return metadata_mock
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
<!DOCTYPE html>
2+
<html>
3+
<head>
4+
<title>Links for isort</title>
5+
</head>
6+
<body>
7+
<h1>Links for isort</h1>
8+
<a href="https://files.pythonhosted.org/packages/1f/2c/non-existant/isort-metadata-4.3.4-py3-none-any.whl#sha256=1153601da39a25b14ddc54955dbbacbb6b2d19135386699e2ad58517953b34af"
9+
data-dist-info-metadata="sha256=e360bf0ed8a06390513d50dd5b7e9d635c789853a93b84163f9de4ae0647580c">isort-metadata-4.3.4-py3-none-any.whl</a><br/>
10+
</body>
11+
</html>
12+
<!--SERIAL 3575149-->
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
1+
Metadata-Version: 2.0
2+
Name: isort-metadata
3+
Version: 4.3.4
4+
Summary: A Python utility / library to sort Python imports.
5+
Home-page: https://github.com/timothycrosley/isort
6+
Author: Timothy Crosley
7+
Author-email: [email protected]
8+
License: MIT
9+
Keywords: Refactor,Python,Python2,Python3,Refactoring,Imports,Sort,Clean
10+
Platform: UNKNOWN
11+
Classifier: Development Status :: 6 - Mature
12+
Classifier: Intended Audience :: Developers
13+
Classifier: Natural Language :: English
14+
Classifier: Environment :: Console
15+
Classifier: License :: OSI Approved :: MIT License
16+
Classifier: Programming Language :: Python
17+
Classifier: Programming Language :: Python :: 2
18+
Classifier: Programming Language :: Python :: 2.7
19+
Classifier: Programming Language :: Python :: 3
20+
Classifier: Programming Language :: Python :: 3.4
21+
Classifier: Programming Language :: Python :: 3.5
22+
Classifier: Programming Language :: Python :: 3.6
23+
Classifier: Programming Language :: Python :: Implementation :: CPython
24+
Classifier: Programming Language :: Python :: Implementation :: PyPy
25+
Classifier: Topic :: Software Development :: Libraries
26+
Classifier: Topic :: Utilities
27+
Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
28+
Requires-Dist: futures; python_version=="2.7"

0 commit comments

Comments (0)