@@ -10,6 +10,7 @@
 from typing import Any
 from typing import Iterator
 
+import pkginfo
 import requests
 import requests.adapters
 
@@ -19,6 +20,7 @@
 from poetry.core.utils.helpers import temporary_directory
 from poetry.core.version.markers import parse_marker
 
+from poetry.inspection.info import PackageInfo
 from poetry.repositories.cached_repository import CachedRepository
 from poetry.repositories.exceptions import PackageNotFound
 from poetry.repositories.exceptions import RepositoryError
@@ -33,7 +35,6 @@
     from packaging.utils import NormalizedName
 
     from poetry.config.config import Config
-    from poetry.inspection.info import PackageInfo
     from poetry.repositories.link_sources.base import LinkSource
     from poetry.utils.authenticator import RepositoryCertificateConfig
 
@@ -97,10 +98,29 @@ def _get_info_from_sdist(self, url: str) -> PackageInfo:
         with self._cached_or_downloaded_file(Link(url)) as filepath:
             return PackageInfo.from_sdist(filepath)
 
-    def _get_info_from_urls(self, urls: dict[str, list[str]]) -> PackageInfo:
+    @staticmethod
+    def _get_info_from_metadata(
+        url: str, metadata: dict[str, pkginfo.Distribution]
+    ) -> PackageInfo | None:
+        if url in metadata:
+            dist = metadata[url]
+            return PackageInfo(
+                name=dist.name,
+                version=dist.version,
+                summary=dist.summary,
+                requires_dist=list(dist.requires_dist),
+                requires_python=dist.requires_python,
+            )
+        return None
+
+    def _get_info_from_urls(
+        self,
+        urls: dict[str, list[str]],
+        metadata: dict[str, pkginfo.Distribution] | None = None,
+    ) -> PackageInfo:
+        metadata = metadata or {}
         # Prefer to read data from wheels: this is faster and more reliable
-        wheels = urls.get("bdist_wheel")
-        if wheels:
+        if wheels := urls.get("bdist_wheel"):
             # We ought just to be able to look at any of the available wheels to read
             # metadata, they all should give the same answer.
             #
@@ -136,13 +156,19 @@ def _get_info_from_urls(self, urls: dict[str, list[str]]) -> PackageInfo:
                     platform_specific_wheels.append(wheel)
 
             if universal_wheel is not None:
-                return self._get_info_from_wheel(universal_wheel)
+                return self._get_info_from_metadata(
+                    universal_wheel, metadata
+                ) or self._get_info_from_wheel(universal_wheel)
 
             info = None
             if universal_python2_wheel and universal_python3_wheel:
-                info = self._get_info_from_wheel(universal_python2_wheel)
+                info = self._get_info_from_metadata(
+                    universal_python2_wheel, metadata
+                ) or self._get_info_from_wheel(universal_python2_wheel)
 
-                py3_info = self._get_info_from_wheel(universal_python3_wheel)
+                py3_info = self._get_info_from_metadata(
+                    universal_python3_wheel, metadata
+                ) or self._get_info_from_wheel(universal_python3_wheel)
 
                 if info.requires_python or py3_info.requires_python:
                     info.requires_python = str(
@@ -192,16 +218,24 @@ def _get_info_from_urls(self, urls: dict[str, list[str]]) -> PackageInfo:
 
             # Prefer non platform specific wheels
             if universal_python3_wheel:
-                return self._get_info_from_wheel(universal_python3_wheel)
+                return self._get_info_from_metadata(
+                    universal_python3_wheel, metadata
+                ) or self._get_info_from_wheel(universal_python3_wheel)
 
             if universal_python2_wheel:
-                return self._get_info_from_wheel(universal_python2_wheel)
+                return self._get_info_from_metadata(
+                    universal_python2_wheel, metadata
+                ) or self._get_info_from_wheel(universal_python2_wheel)
 
             if platform_specific_wheels:
                 first_wheel = platform_specific_wheels[0]
-                return self._get_info_from_wheel(first_wheel)
+                return self._get_info_from_metadata(
+                    first_wheel, metadata
+                ) or self._get_info_from_wheel(first_wheel)
 
-        return self._get_info_from_sdist(urls["sdist"][0])
+        return self._get_info_from_metadata(
+            urls["sdist"][0], metadata
+        ) or self._get_info_from_sdist(urls["sdist"][0])
 
     def _links_to_data(self, links: list[Link], data: PackageInfo) -> dict[str, Any]:
         if not links:
@@ -210,11 +244,37 @@ def _links_to_data(self, links: list[Link], data: PackageInfo) -> dict[str, Any]
                 f' "{data.version}"'
             )
         urls = defaultdict(list)
+        metadata = {}
         files: list[dict[str, Any]] = []
         for link in links:
             if link.yanked and not data.yanked:
                 # drop yanked files unless the entire release is yanked
                 continue
+            if link.has_metadata:
+                try:
+                    assert link.metadata_url is not None
+                    response = self.session.get(link.metadata_url)
+                    distribution = pkginfo.Distribution()
+                    assert link.metadata_hash_name is not None
+                    metadata_hash = getattr(hashlib, link.metadata_hash_name)(
+                        response.text.encode()
+                    ).hexdigest()
+
+                    if metadata_hash != link.metadata_hash:
+                        self._log(
+                            f"Metadata file hash ({metadata_hash}) does not match"
+                            f" expected hash ({link.metadata_hash}).",
+                            level="warning",
+                        )
+
+                    distribution.parse(response.content)
+                    metadata[link.url] = distribution
+                except requests.HTTPError:
+                    self._log(
+                        f"Failed to retrieve metadata at {link.metadata_url}",
+                        level="debug",
+                    )
+
             if link.is_wheel:
                 urls["bdist_wheel"].append(link.url)
             elif link.filename.endswith(
@@ -235,7 +295,7 @@ def _links_to_data(self, links: list[Link], data: PackageInfo) -> dict[str, Any]
 
         data.files = files
 
-        info = self._get_info_from_urls(urls)
+        info = self._get_info_from_urls(urls, metadata)
 
         data.summary = info.summary
         data.requires_dist = info.requires_dist
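For context, a minimal standalone sketch of the PEP 658 flow these hunks wire into _links_to_data: fetch the metadata file that a simple index serves next to a distribution (the distribution URL with ".metadata" appended), check the hash the index advertises, and parse the result with pkginfo.Distribution.parse() to get name, version, and requirements without downloading the wheel itself. The URL and hash below are purely illustrative, and a plain requests.get stands in for Poetry's authenticated session.

    import hashlib

    import pkginfo
    import requests

    # Illustrative values: a wheel URL on a PEP 658-capable index and the
    # sha256 hash of its metadata file as advertised by the index page.
    wheel_url = "https://example.org/packages/demo-1.0-py3-none-any.whl"
    advertised_sha256 = "0123..."  # placeholder

    # PEP 658: the core metadata is served at "<distribution-url>.metadata".
    response = requests.get(wheel_url + ".metadata")
    response.raise_for_status()

    # Verify the advertised hash before trusting the file (the diff only warns on mismatch).
    if hashlib.sha256(response.text.encode()).hexdigest() != advertised_sha256:
        print("metadata hash mismatch")

    # pkginfo parses the raw METADATA bytes and exposes the usual fields.
    dist = pkginfo.Distribution()
    dist.parse(response.content)
    print(dist.name, dist.version, dist.requires_python, list(dist.requires_dist))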