Commit aef9975

abn and Secrus committed

repositories: add support for PEP 658

This change allows Poetry to make use of PEP 503 "simple" API repositories that implement PEP 658 for core metadata.

Co-authored-by: Bartosz Sokorski <[email protected]>

1 parent c5d2c6b · commit aef9975

File tree: 5 files changed, +154 -15 lines changed
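For context: PEP 658 lets a PEP 503 "simple" index advertise the core metadata of each file via a data-dist-info-metadata attribute on the file's anchor tag, and serves that metadata next to the distribution at the file URL with ".metadata" appended. A minimal sketch of the client-side conventions this commit builds on (the helper names are illustrative, not Poetry's API; only the URL and attribute rules come from the PEP):

import hashlib


def metadata_url_for(file_url: str) -> str:
    # PEP 658: core metadata is served at the distribution URL plus ".metadata".
    return file_url + ".metadata"


def metadata_matches(content: bytes, attr_value: str) -> bool:
    # attr_value is the data-dist-info-metadata value, e.g. "sha256=<hexdigest>";
    # a bare "true" means metadata is available but no hash was published.
    if attr_value == "true":
        return True
    hash_name, _, expected = attr_value.partition("=")
    return hashlib.new(hash_name, content).hexdigest() == expected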


src/poetry/repositories/http_repository.py (+74 -12)
@@ -10,6 +10,7 @@
 from typing import Any
 from typing import Iterator

+import pkginfo
 import requests

 from poetry.core.constraints.version import parse_constraint
@@ -18,6 +19,7 @@
 from poetry.core.utils.helpers import temporary_directory
 from poetry.core.version.markers import parse_marker

+from poetry.inspection.info import PackageInfo
 from poetry.repositories.cached_repository import CachedRepository
 from poetry.repositories.exceptions import PackageNotFound
 from poetry.repositories.exceptions import RepositoryError
@@ -32,7 +34,6 @@
     from packaging.utils import NormalizedName

     from poetry.config.config import Config
-    from poetry.inspection.info import PackageInfo
     from poetry.repositories.link_sources.base import LinkSource
     from poetry.utils.authenticator import RepositoryCertificateConfig

@@ -98,10 +99,29 @@ def _get_info_from_sdist(self, url: str) -> PackageInfo:
         with self._cached_or_downloaded_file(Link(url)) as filepath:
             return PackageInfo.from_sdist(filepath)

-    def _get_info_from_urls(self, urls: dict[str, list[str]]) -> PackageInfo:
+    @staticmethod
+    def _get_info_from_metadata(
+        url: str, metadata: dict[str, pkginfo.Distribution]
+    ) -> PackageInfo | None:
+        if url in metadata:
+            dist = metadata[url]
+            return PackageInfo(
+                name=dist.name,
+                version=dist.version,
+                summary=dist.summary,
+                requires_dist=list(dist.requires_dist),
+                requires_python=dist.requires_python,
+            )
+        return None
+
+    def _get_info_from_urls(
+        self,
+        urls: dict[str, list[str]],
+        metadata: dict[str, pkginfo.Distribution] | None = None,
+    ) -> PackageInfo:
+        metadata = metadata or {}
         # Prefer to read data from wheels: this is faster and more reliable
-        wheels = urls.get("bdist_wheel")
-        if wheels:
+        if wheels := urls.get("bdist_wheel"):
             # We ought just to be able to look at any of the available wheels to read
             # metadata, they all should give the same answer.
             #
@@ -137,13 +157,19 @@ def _get_info_from_urls(self, urls: dict[str, list[str]]) -> PackageInfo:
                    platform_specific_wheels.append(wheel)

             if universal_wheel is not None:
-                return self._get_info_from_wheel(universal_wheel)
+                return self._get_info_from_metadata(
+                    universal_wheel, metadata
+                ) or self._get_info_from_wheel(universal_wheel)

             info = None
             if universal_python2_wheel and universal_python3_wheel:
-                info = self._get_info_from_wheel(universal_python2_wheel)
+                info = self._get_info_from_metadata(
+                    universal_python2_wheel, metadata
+                ) or self._get_info_from_wheel(universal_python2_wheel)

-                py3_info = self._get_info_from_wheel(universal_python3_wheel)
+                py3_info = self._get_info_from_metadata(
+                    universal_python3_wheel, metadata
+                ) or self._get_info_from_wheel(universal_python3_wheel)

                 if info.requires_python or py3_info.requires_python:
                     info.requires_python = str(
@@ -193,16 +219,24 @@ def _get_info_from_urls(self, urls: dict[str, list[str]]) -> PackageInfo:

             # Prefer non platform specific wheels
             if universal_python3_wheel:
-                return self._get_info_from_wheel(universal_python3_wheel)
+                return self._get_info_from_metadata(
+                    universal_python3_wheel, metadata
+                ) or self._get_info_from_wheel(universal_python3_wheel)

             if universal_python2_wheel:
-                return self._get_info_from_wheel(universal_python2_wheel)
+                return self._get_info_from_metadata(
+                    universal_python2_wheel, metadata
+                ) or self._get_info_from_wheel(universal_python2_wheel)

             if platform_specific_wheels:
                 first_wheel = platform_specific_wheels[0]
-                return self._get_info_from_wheel(first_wheel)
+                return self._get_info_from_metadata(
+                    first_wheel, metadata
+                ) or self._get_info_from_wheel(first_wheel)

-        return self._get_info_from_sdist(urls["sdist"][0])
+        return self._get_info_from_metadata(
+            urls["sdist"][0], metadata
+        ) or self._get_info_from_sdist(urls["sdist"][0])

     def _links_to_data(self, links: list[Link], data: PackageInfo) -> dict[str, Any]:
         if not links:
@@ -211,11 +245,39 @@ def _links_to_data(self, links: list[Link], data: PackageInfo) -> dict[str, Any]
                 f' "{data.version}"'
             )
         urls = defaultdict(list)
+        metadata = {}
         files: list[dict[str, Any]] = []
         for link in links:
             if link.yanked and not data.yanked:
                 # drop yanked files unless the entire release is yanked
                 continue
+            if link.has_metadata:
+                try:
+                    assert link.metadata_url is not None
+                    response = self.session.get(link.metadata_url)
+                    distribution = pkginfo.Distribution()
+                    assert link.metadata_hash_name is not None
+                    metadata_hash = getattr(hashlib, link.metadata_hash_name)(
+                        response.text.encode()
+                    ).hexdigest()
+
+                    if metadata_hash != link.metadata_hash:
+                        self._log(
+                            (
+                                f"Metadata file hash ({metadata_hash}) does not match"
+                                f" expected hash ({link.metadata_hash})."
+                            ),
+                            level="warning",
+                        )
+
+                    distribution.parse(response.content)
+                    metadata[link.url] = distribution
+                except requests.HTTPError:
+                    self._log(
+                        f"Failed to retrieve metadata at {link.metadata_url}",
+                        level="debug",
+                    )
+
             if link.is_wheel:
                 urls["bdist_wheel"].append(link.url)
             elif link.filename.endswith(
@@ -253,7 +315,7 @@ def _links_to_data(self, links: list[Link], data: PackageInfo) -> dict[str, Any]

         data.files = files

-        info = self._get_info_from_urls(urls)
+        info = self._get_info_from_urls(urls, metadata)

         data.summary = info.summary
         data.requires_dist = info.requires_dist
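To make the new code path concrete, here is a rough standalone sketch of the fetch, verify, and parse step that _links_to_data now performs for links carrying PEP 658 metadata. The Link attributes (metadata_url, metadata_hash_name, metadata_hash) are the ones used in the diff; the wrapper function itself is only illustrative:

import hashlib

import pkginfo
import requests


def fetch_metadata(url: str, hash_name: str, expected: str) -> pkginfo.Distribution:
    # Download the PEP 658 metadata file served next to the wheel/sdist.
    response = requests.get(url)
    response.raise_for_status()

    # Verify the digest advertised on the index page; Poetry only logs a
    # warning on mismatch and still parses the metadata.
    digest = getattr(hashlib, hash_name)(response.text.encode()).hexdigest()
    if digest != expected:
        print(f"warning: metadata hash {digest} does not match expected {expected}")

    # Parse the raw METADATA content into name, version, requires_dist, etc.
    distribution = pkginfo.Distribution()
    distribution.parse(response.content)
    return distribution

_get_info_from_metadata then converts such a Distribution into a PackageInfo, which is why Poetry no longer has to download a wheel or sdist just to read a package's dependencies.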

src/poetry/repositories/link_sources/html.py (+5 -3)
@@ -32,8 +32,7 @@ def __init__(self, url: str, content: str) -> None:
     def _link_cache(self) -> LinkCache:
         links: LinkCache = defaultdict(lambda: defaultdict(list))
         for anchor in self._parsed.findall(".//a"):
-            if anchor.get("href"):
-                href = anchor.get("href")
+            if href := anchor.get("href"):
                 url = self.clean_link(urllib.parse.urljoin(self._url, href))
                 pyrequire = anchor.get("data-requires-python")
                 pyrequire = unescape(pyrequire) if pyrequire else None
@@ -43,7 +42,10 @@ def _link_cache(self) -> LinkCache:
                     yanked = unescape(yanked_value)
                 else:
                     yanked = "data-yanked" in anchor.attrib
-                link = Link(url, requires_python=pyrequire, yanked=yanked)
+                metadata = anchor.get("data-dist-info-metadata")
+                link = Link(
+                    url, requires_python=pyrequire, yanked=yanked, metadata=metadata
+                )

                 if link.ext not in self.SUPPORTED_FORMATS:
                     continue
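The attribute value parsed here is passed straight through to poetry-core's Link, which exposes the has_metadata, metadata_url, metadata_hash_name, and metadata_hash properties consumed in http_repository.py above. A hedged sketch of that hand-off (the import path and the digest are assumptions on my part; the property names come from the diffs in this commit):

from poetry.core.packages.utils.link import Link  # assumed import path

link = Link(
    "https://example.org/simple/demo/demo-1.0-py3-none-any.whl",
    metadata="sha256=0123abcd",  # raw data-dist-info-metadata value, hypothetical digest
)

assert link.has_metadata
print(link.metadata_url)        # per PEP 658, the wheel URL with ".metadata" appended
print(link.metadata_hash_name)  # "sha256"
print(link.metadata_hash)       # "0123abcd"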
New file (test fixture: PEP 503 index page for isort-metadata; path not shown in this view) (+12)

@@ -0,0 +1,12 @@
+<!DOCTYPE html>
+<html>
+  <head>
+    <title>Links for isort</title>
+  </head>
+  <body>
+    <h1>Links for isort</h1>
+    <a href="https://files.pythonhosted.org/packages/1f/2c/non-existant/isort-metadata-4.3.4-py3-none-any.whl#sha256=1153601da39a25b14ddc54955dbbacbb6b2d19135386699e2ad58517953b34af"
+       data-dist-info-metadata="sha256=e360bf0ed8a06390513d50dd5b7e9d635c789853a93b84163f9de4ae0647580c">isort-metadata-4.3.4-py3-none-any.whl</a><br/>
+  </body>
+</html>
+<!--SERIAL 3575149-->
New file (test fixture: core metadata for isort-metadata 4.3.4; path not shown in this view) (+28)

@@ -0,0 +1,28 @@
+Metadata-Version: 2.0
+Name: isort-metadata
+Version: 4.3.4
+Summary: A Python utility / library to sort Python imports.
+Home-page: https://github.com/timothycrosley/isort
+Author: Timothy Crosley
+Author-email: [email protected]
+License: MIT
+Keywords: Refactor,Python,Python2,Python3,Refactoring,Imports,Sort,Clean
+Platform: UNKNOWN
+Classifier: Development Status :: 6 - Mature
+Classifier: Intended Audience :: Developers
+Classifier: Natural Language :: English
+Classifier: Environment :: Console
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Utilities
+Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*
+Requires-Dist: futures; python_version=="2.7"

tests/repositories/test_legacy_repository.py (+35)
@@ -1,6 +1,7 @@
 from __future__ import annotations

 import base64
+import posixpath
 import re
 import shutil

@@ -23,10 +24,13 @@


 if TYPE_CHECKING:
+    from typing import Any
+
     import httpretty

     from _pytest.monkeypatch import MonkeyPatch
     from packaging.utils import NormalizedName
+    from pytest_mock import MockerFixture

     from poetry.config.config import Config

@@ -149,6 +153,37 @@ def test_get_package_information_fallback_read_setup() -> None:
     )


+def _get_mock(url: str, **__: Any) -> requests.Response:
+    if url.endswith(".metadata"):
+        response = requests.Response()
+        response.encoding = "application/text"
+        response._content = MockRepository.FIXTURES.joinpath(
+            "metadata", posixpath.basename(url)
+        ).read_bytes()
+        return response
+    raise requests.HTTPError()
+
+
+def test_get_package_information_pep_658(mocker: MockerFixture) -> None:
+    repo = MockRepository()
+
+    isort_package = repo.package("isort", Version.parse("4.3.4"))
+
+    mocker.patch.object(repo.session, "get", _get_mock)
+
+    try:
+        package = repo.package("isort-metadata", Version.parse("4.3.4"))
+    except FileNotFoundError:
+        pytest.fail("Metadata was not successfully retrieved")
+    else:
+        assert package.source_type == isort_package.source_type == "legacy"
+        assert package.source_reference == isort_package.source_reference == repo.name
+        assert package.source_url == isort_package.source_url == repo.url
+        assert package.name == "isort-metadata"
+        assert package.version.text == isort_package.version.text == "4.3.4"
+        assert package.description == isort_package.description
+
+
 def test_get_package_information_skips_dependencies_with_invalid_constraints() -> None:
     repo = MockRepository()

