@@ -3,7 +3,6 @@
 import functools
 import hashlib
 
-from collections import defaultdict
 from contextlib import contextmanager
 from pathlib import Path
 from typing import TYPE_CHECKING
@@ -16,7 +15,6 @@
 
 from poetry.core.constraints.version import parse_constraint
 from poetry.core.packages.dependency import Dependency
-from poetry.core.packages.utils.link import Link
 from poetry.core.utils.helpers import temporary_directory
 from poetry.core.version.markers import parse_marker
 
@@ -37,6 +35,7 @@
 
 if TYPE_CHECKING:
     from packaging.utils import NormalizedName
+    from poetry.core.packages.utils.link import Link
 
     from poetry.repositories.link_sources.base import LinkSource
     from poetry.utils.authenticator import RepositoryCertificateConfig
@@ -109,10 +108,9 @@ def _cached_or_downloaded_file(
             )
             yield filepath
 
-    def _get_info_from_wheel(self, url: str) -> PackageInfo:
+    def _get_info_from_wheel(self, link: Link) -> PackageInfo:
         from poetry.inspection.info import PackageInfo
 
-        link = Link(url)
         netloc = link.netloc
 
         # If "lazy-wheel" is enabled and the domain supports range requests
@@ -146,37 +144,73 @@ def _get_info_from_wheel(self, url: str) -> PackageInfo:
                 level="debug",
             )
             self._supports_range_requests[netloc] = True
-            return self._get_info_from_wheel(link.url)
+            return self._get_info_from_wheel(link)
 
-    def _get_info_from_sdist(self, url: str) -> PackageInfo:
+    def _get_info_from_sdist(self, link: Link) -> PackageInfo:
         from poetry.inspection.info import PackageInfo
 
-        with self._cached_or_downloaded_file(Link(url)) as filepath:
+        with self._cached_or_downloaded_file(link) as filepath:
             return PackageInfo.from_sdist(filepath)
 
-    @staticmethod
-    def _get_info_from_metadata(
-        url: str, metadata: dict[str, pkginfo.Distribution]
-    ) -> PackageInfo | None:
-        if url in metadata:
-            dist = metadata[url]
-            return PackageInfo(
-                name=dist.name,
-                version=dist.version,
-                summary=dist.summary,
-                requires_dist=list(dist.requires_dist),
-                requires_python=dist.requires_python,
-            )
+    def _get_info_from_metadata(self, link: Link) -> PackageInfo | None:
+        if link.has_metadata:
+            try:
+                assert link.metadata_url is not None
+                response = self.session.get(link.metadata_url)
+                distribution = pkginfo.Distribution()
+                if link.metadata_hash_name is not None:
+                    metadata_hash = getattr(hashlib, link.metadata_hash_name)(
+                        response.text.encode()
+                    ).hexdigest()
+
+                    if metadata_hash != link.metadata_hash:
+                        self._log(
+                            f"Metadata file hash ({metadata_hash}) does not match"
+                            f" expected hash ({link.metadata_hash})."
+                            f" Metadata file for {link.filename} will be ignored.",
+                            level="warning",
+                        )
+                        return None
+
+                distribution.parse(response.content)
+                return PackageInfo(
+                    name=distribution.name,
+                    version=distribution.version,
+                    summary=distribution.summary,
+                    requires_dist=list(distribution.requires_dist),
+                    requires_python=distribution.requires_python,
+                )
+
+            except requests.HTTPError:
+                self._log(
+                    f"Failed to retrieve metadata at {link.metadata_url}",
+                    level="warning",
+                )
+
         return None
 
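Note: the reworked _get_info_from_metadata fetches the PEP 658 metadata file advertised by the index and verifies it before trusting it; the index publishes the digest as a hash-name/hex pair, and the client recomputes it with the matching hashlib constructor. A minimal standalone sketch of that check (the function name and sample payload below are illustrative, not part of the diff):

import hashlib

def metadata_matches(content: bytes, hash_name: str, expected_hex: str) -> bool:
    # Look up the hashlib constructor named by the index (e.g. "sha256")
    # and compare hex digests; on a mismatch the metadata file is ignored,
    # exactly as the diff above logs a warning and returns None.
    return getattr(hashlib, hash_name)(content).hexdigest() == expected_hex

payload = b"Metadata-Version: 2.1\nName: demo\nVersion: 1.0\n"
expected = hashlib.sha256(payload).hexdigest()  # what an index would publish
assert metadata_matches(payload, "sha256", expected)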
-    def _get_info_from_urls(
+    def _get_info_from_links(
         self,
-        urls: dict[str, list[str]],
-        metadata: dict[str, pkginfo.Distribution] | None = None,
+        links: list[Link],
+        *,
+        ignore_yanked: bool = True,
     ) -> PackageInfo:
-        metadata = metadata or {}
+        # Sort links by distribution type
+        wheels: list[Link] = []
+        sdists: list[Link] = []
+        for link in links:
+            if link.yanked and ignore_yanked:
+                # drop yanked files unless the entire release is yanked
+                continue
+            if link.is_wheel:
+                wheels.append(link)
+            elif link.filename.endswith(
+                (".tar.gz", ".zip", ".bz2", ".xz", ".Z", ".tar")
+            ):
+                sdists.append(link)
+
         # Prefer to read data from wheels: this is faster and more reliable
-        if wheels := urls.get("bdist_wheel"):
+        if wheels:
             # We ought just to be able to look at any of the available wheels to read
             # metadata, they all should give the same answer.
             #
@@ -191,8 +225,7 @@ def _get_info_from_urls(
             universal_python3_wheel = None
             platform_specific_wheels = []
             for wheel in wheels:
-                link = Link(wheel)
-                m = wheel_file_re.match(link.filename)
+                m = wheel_file_re.match(wheel.filename)
                 if not m:
                     continue
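Aside: the wheel_file_re pattern used above parses a wheel filename's compatibility tags, which is what lets this loop classify wheels as universal or platform specific. A rough sketch of the idea with a simplified pattern (the regex below is an assumption for illustration; the real pattern is stricter):

import re

# Simplified stand-in for wheel_file_re:
# name-version[-build]-python-abi-platform.whl
WHEEL_RE = re.compile(
    r"^(?P<name>[^-]+)-(?P<version>[^-]+)(?:-(?P<build>[^-]+))?"
    r"-(?P<python>[^-]+)-(?P<abi>[^-]+)-(?P<platform>[^-]+)\.whl$"
)

m = WHEEL_RE.match("demo-1.0-py2.py3-none-any.whl")
assert m is not None
# "py2.py3" with platform "any" marks a universal wheel; a platform tag
# such as "manylinux2014_x86_64" would make it platform specific.
assert m.group("python") == "py2.py3" and m.group("platform") == "any"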
@@ -213,17 +246,17 @@ def _get_info_from_urls(
 
             if universal_wheel is not None:
                 return self._get_info_from_metadata(
-                    universal_wheel, metadata
+                    universal_wheel
                 ) or self._get_info_from_wheel(universal_wheel)
 
             info = None
             if universal_python2_wheel and universal_python3_wheel:
                 info = self._get_info_from_metadata(
-                    universal_python2_wheel, metadata
+                    universal_python2_wheel
                 ) or self._get_info_from_wheel(universal_python2_wheel)
 
                 py3_info = self._get_info_from_metadata(
-                    universal_python3_wheel, metadata
+                    universal_python3_wheel
                 ) or self._get_info_from_wheel(universal_python3_wheel)
 
                 if info.requires_python or py3_info.requires_python:
@@ -275,71 +308,23 @@ def _get_info_from_urls(
             # Prefer non platform specific wheels
             if universal_python3_wheel:
                 return self._get_info_from_metadata(
-                    universal_python3_wheel, metadata
+                    universal_python3_wheel
                 ) or self._get_info_from_wheel(universal_python3_wheel)
 
             if universal_python2_wheel:
                 return self._get_info_from_metadata(
-                    universal_python2_wheel, metadata
+                    universal_python2_wheel
                 ) or self._get_info_from_wheel(universal_python2_wheel)
 
             if platform_specific_wheels:
                 first_wheel = platform_specific_wheels[0]
                 return self._get_info_from_metadata(
-                    first_wheel, metadata
+                    first_wheel
                 ) or self._get_info_from_wheel(first_wheel)
 
-        return self._get_info_from_metadata(
-            urls["sdist"][0], metadata
-        ) or self._get_info_from_sdist(urls["sdist"][0])
-
-    def _get_info_from_links(
-        self,
-        links: list[Link],
-        *,
-        ignore_yanked: bool = True,
-    ) -> PackageInfo:
-        urls = defaultdict(list)
-        metadata: dict[str, pkginfo.Distribution] = {}
-        for link in links:
-            if link.yanked and ignore_yanked:
-                # drop yanked files unless the entire release is yanked
-                continue
-            if link.has_metadata:
-                try:
-                    assert link.metadata_url is not None
-                    response = self.session.get(link.metadata_url)
-                    distribution = pkginfo.Distribution()
-                    if link.metadata_hash_name is not None:
-                        metadata_hash = getattr(hashlib, link.metadata_hash_name)(
-                            response.text.encode()
-                        ).hexdigest()
-
-                        if metadata_hash != link.metadata_hash:
-                            self._log(
-                                f"Metadata file hash ({metadata_hash}) does not match"
-                                f" expected hash ({link.metadata_hash})."
-                                f" Metadata file for {link.filename} will be ignored.",
-                                level="warning",
-                            )
-                            continue
-
-                    distribution.parse(response.content)
-                    metadata[link.url] = distribution
-                except requests.HTTPError:
-                    self._log(
-                        f"Failed to retrieve metadata at {link.metadata_url}",
-                        level="warning",
-                    )
-
-            if link.is_wheel:
-                urls["bdist_wheel"].append(link.url)
-            elif link.filename.endswith(
-                (".tar.gz", ".zip", ".bz2", ".xz", ".Z", ".tar")
-            ):
-                urls["sdist"].append(link.url)
-
-        return self._get_info_from_urls(urls, metadata)
+        return self._get_info_from_metadata(sdists[0]) or self._get_info_from_sdist(
+            sdists[0]
+        )
 
     def _links_to_data(self, links: list[Link], data: PackageInfo) -> dict[str, Any]:
         if not links:
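Overall, the refactor threads poetry-core Link objects through the info helpers instead of re-parsing URL strings at each step, and folds the old _get_info_from_urls into _get_info_from_links. A rough sketch of the new bucketing flow (simplified and outside the real repository class; bucket_links and the example.org URLs are placeholders of mine):

from poetry.core.packages.utils.link import Link

SDIST_SUFFIXES = (".tar.gz", ".zip", ".bz2", ".xz", ".Z", ".tar")

def bucket_links(
    links: list[Link], *, ignore_yanked: bool = True
) -> tuple[list[Link], list[Link]]:
    # Mirrors the sorting step of _get_info_from_links: wheels are preferred
    # because their metadata can be read without running a build backend.
    wheels: list[Link] = []
    sdists: list[Link] = []
    for link in links:
        if link.yanked and ignore_yanked:
            continue  # drop yanked files unless the entire release is yanked
        if link.is_wheel:
            wheels.append(link)
        elif link.filename.endswith(SDIST_SUFFIXES):
            sdists.append(link)
    return wheels, sdists

links = [
    Link("https://example.org/demo-1.0-py3-none-any.whl"),
    Link("https://example.org/demo-1.0.tar.gz"),
]
wheels, sdists = bucket_links(links)
assert wheels[0].filename.endswith(".whl")
assert sdists[0].filename.endswith(".tar.gz")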