Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
keewenaw
GitHub Repository: keewenaw/ethereum-wallet-cracker
Path: blob/main/test/lib/python3.9/site-packages/pip/_internal/operations/prepare.py
4804 views
1
"""Prepares a distribution for installation
2
"""
3
4
# The following comment should be removed at some point in the future.
5
# mypy: strict-optional=False
6
7
import logging
8
import mimetypes
9
import os
10
import shutil
11
from typing import Dict, Iterable, List, Optional
12
13
from pip._vendor.packaging.utils import canonicalize_name
14
15
from pip._internal.distributions import make_distribution_for_install_requirement
16
from pip._internal.distributions.installed import InstalledDistribution
17
from pip._internal.exceptions import (
18
DirectoryUrlHashUnsupported,
19
HashMismatch,
20
HashUnpinned,
21
InstallationError,
22
NetworkConnectionError,
23
PreviousBuildDirError,
24
VcsHashUnsupported,
25
)
26
from pip._internal.index.package_finder import PackageFinder
27
from pip._internal.metadata import BaseDistribution
28
from pip._internal.models.link import Link
29
from pip._internal.models.wheel import Wheel
30
from pip._internal.network.download import BatchDownloader, Downloader
31
from pip._internal.network.lazy_wheel import (
32
HTTPRangeRequestUnsupported,
33
dist_from_wheel_url,
34
)
35
from pip._internal.network.session import PipSession
36
from pip._internal.operations.build.build_tracker import BuildTracker
37
from pip._internal.req.req_install import InstallRequirement
38
from pip._internal.utils.hashes import Hashes, MissingHashes
39
from pip._internal.utils.logging import indent_log
40
from pip._internal.utils.misc import display_path, hide_url, is_installable_dir
41
from pip._internal.utils.temp_dir import TempDirectory
42
from pip._internal.utils.unpacking import unpack_file
43
from pip._internal.vcs import vcs
44
45
logger = logging.getLogger(__name__)
46
47
48
def _get_prepared_distribution(
    req: InstallRequirement,
    build_tracker: BuildTracker,
    finder: PackageFinder,
    build_isolation: bool,
    check_build_deps: bool,
) -> BaseDistribution:
    """Prepare a distribution for installation.

    The requirement is registered with *build_tracker* for the duration
    of the metadata preparation step, then the resulting metadata-backed
    distribution object is returned.
    """
    dist_builder = make_distribution_for_install_requirement(req)
    with build_tracker.track(req):
        dist_builder.prepare_distribution_metadata(
            finder, build_isolation, check_build_deps
        )
    return dist_builder.get_metadata_distribution()
def unpack_vcs_link(link: Link, location: str, verbosity: int) -> None:
    """Check out the sources referenced by a VCS *link* into *location*."""
    backend = vcs.get_backend_for_scheme(link.scheme)
    assert backend is not None
    backend.unpack(location, url=hide_url(link.url), verbosity=verbosity)
class File:
    """A locally available archive/file, with its content type.

    When *content_type* is not supplied it is guessed from the file
    name's extension.
    """

    def __init__(self, path: str, content_type: Optional[str]) -> None:
        self.path = path
        if content_type is not None:
            self.content_type = content_type
        else:
            self.content_type = mimetypes.guess_type(path)[0]
def get_http_url(
    link: Link,
    download: Downloader,
    download_dir: Optional[str] = None,
    hashes: Optional[Hashes] = None,
) -> File:
    """Obtain the file behind an HTTP(S) *link*, reusing a prior download.

    When *download_dir* already holds a valid copy, that copy is used;
    otherwise the link is downloaded into a managed temporary directory
    and checked against *hashes* (when provided).
    """
    temp_dir = TempDirectory(kind="unpack", globally_managed=True)
    # If a download dir is specified, is the file already downloaded there?
    cached_path = None
    if download_dir:
        cached_path = _check_download_dir(link, download_dir, hashes)

    if cached_path:
        return File(cached_path, None)

    # Nothing usable on disk: download to the temporary directory and
    # verify the hash before handing the file back.
    from_path, content_type = download(link, temp_dir.path)
    if hashes:
        hashes.check_against_path(from_path)

    return File(from_path, content_type)
def get_file_url(
    link: Link, download_dir: Optional[str] = None, hashes: Optional[Hashes] = None
) -> File:
    """Get file and optionally check its hash."""
    # Prefer a previously downloaded (and valid) copy in download_dir.
    from_path = None
    if download_dir:
        from_path = _check_download_dir(link, download_dir, hashes)

    if not from_path:
        from_path = link.file_path

    # If --require-hashes is off, `hashes` is either empty, the
    # link's embedded hash, or MissingHashes; it is required to
    # match. If --require-hashes is on, we are satisfied by any
    # hash in `hashes` matching: a URL-based or an option-based
    # one; no internet-sourced hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(from_path)
    return File(from_path, None)
def unpack_url(
    link: Link,
    location: str,
    download: Downloader,
    verbosity: int,
    download_dir: Optional[str] = None,
    hashes: Optional[Hashes] = None,
) -> Optional[File]:
    """Unpack link into location, downloading if required.

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # Non-editable VCS URLs are checked out in place; there is no archive
    # file to return.
    if link.is_vcs:
        unpack_vcs_link(link, location, verbosity=verbosity)
        return None

    assert not link.is_existing_dir()

    # Obtain the archive, either from a local file URL or over HTTP.
    if link.is_file:
        file = get_file_url(link, download_dir, hashes=hashes)
    else:
        file = get_http_url(
            link,
            download,
            download_dir,
            hashes=hashes,
        )

    # unpack the archive to the build dir location. even when only downloading
    # archives, they have to be unpacked to parse dependencies, except wheels
    if not link.is_wheel:
        unpack_file(file.path, location, file.content_type)

    return file
def _check_download_dir(
171
link: Link, download_dir: str, hashes: Optional[Hashes]
172
) -> Optional[str]:
173
"""Check download_dir for previously downloaded file with correct hash
174
If a correct file is found return its path else None
175
"""
176
download_path = os.path.join(download_dir, link.filename)
177
178
if not os.path.exists(download_path):
179
return None
180
181
# If already downloaded, does its hash match?
182
logger.info("File was already downloaded %s", download_path)
183
if hashes:
184
try:
185
hashes.check_against_path(download_path)
186
except HashMismatch:
187
logger.warning(
188
"Previously-downloaded file %s has bad hash. Re-downloading.",
189
download_path,
190
)
191
os.unlink(download_path)
192
return None
193
return download_path
194
195
196
class RequirementPreparer:
    """Prepares a Requirement"""

    def __init__(
        self,
        build_dir: str,
        download_dir: Optional[str],
        src_dir: str,
        build_isolation: bool,
        check_build_deps: bool,
        build_tracker: BuildTracker,
        session: PipSession,
        progress_bar: str,
        finder: PackageFinder,
        require_hashes: bool,
        use_user_site: bool,
        lazy_wheel: bool,
        verbosity: int,
    ) -> None:
        super().__init__()

        self.src_dir = src_dir
        self.build_dir = build_dir
        self.build_tracker = build_tracker
        self._session = session
        self._download = Downloader(session, progress_bar)
        self._batch_download = BatchDownloader(session, progress_bar)
        self.finder = finder

        # Where still-packed archives should be written to. If None, they are
        # not saved, and are deleted immediately after unpacking.
        self.download_dir = download_dir

        # Is build isolation allowed?
        self.build_isolation = build_isolation

        # Should check build dependencies?
        self.check_build_deps = check_build_deps

        # Should hash-checking be required?
        self.require_hashes = require_hashes

        # Should install in user site-packages?
        self.use_user_site = use_user_site

        # Should wheels be downloaded lazily?
        self.use_lazy_wheel = lazy_wheel

        # How verbose should underlying tooling be?
        self.verbosity = verbosity

        # Memoized downloaded files, as mapping of url: path.
        self._downloaded: Dict[str, str] = {}

        # Previous "header" printed for a link-based InstallRequirement
        self._previous_requirement_header = ("", "")

    def _log_preparing_link(self, req: InstallRequirement) -> None:
        """Provide context for the requirement being prepared."""
        if req.link.is_file and not req.original_link_is_in_wheel_cache:
            message = "Processing %s"
            information = str(display_path(req.link.file_path))
        else:
            message = "Collecting %s"
            information = str(req.req or req)

        # Avoid logging the same header twice in a row.
        if (message, information) != self._previous_requirement_header:
            self._previous_requirement_header = (message, information)
            logger.info(message, information)

        if req.original_link_is_in_wheel_cache:
            with indent_log():
                logger.info("Using cached %s", req.link.filename)

    def _ensure_link_req_src_dir(
        self, req: InstallRequirement, parallel_builds: bool
    ) -> None:
        """Ensure source_dir of a linked InstallRequirement."""
        # Since source_dir is only set for editable requirements.
        if req.link.is_wheel:
            # We don't need to unpack wheels, so no need for a source
            # directory.
            return
        assert req.source_dir is None
        if req.link.is_existing_dir():
            # build local directories in-tree
            req.source_dir = req.link.file_path
            return

        # We always delete unpacked sdists after pip runs.
        req.ensure_has_source_dir(
            self.build_dir,
            autodelete=True,
            parallel_builds=parallel_builds,
        )

        # If a checkout exists, it's unwise to keep going. version
        # inconsistencies are logged later, but do not fail the
        # installation.
        # FIXME: this won't upgrade when there's an existing
        # package unpacked in `req.source_dir`
        # TODO: this check is now probably dead code
        if is_installable_dir(req.source_dir):
            raise PreviousBuildDirError(
                # BUGFIX: the original implicit string concatenation was
                # missing a space ("due to apre-existing") and had a stray
                # space before the period in "failed .".
                "pip can't proceed with requirements '{}' due to a "
                "pre-existing build directory ({}). This is likely "
                "due to a previous installation that failed. pip is "
                "being responsible and not assuming it can delete this. "
                "Please delete it and try again.".format(req, req.source_dir)
            )

    def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes:
        """Return the hashes a linked requirement must match.

        Raises an informative error for requirement kinds that cannot be
        hash-checked (VCS links, local directories, unpinned versions).
        """
        # By the time this is called, the requirement's link should have
        # been checked so we can tell what kind of requirements req is
        # and raise some more informative errors than otherwise.
        # (For example, we can raise VcsHashUnsupported for a VCS URL
        # rather than HashMissing.)
        if not self.require_hashes:
            return req.hashes(trust_internet=True)

        # We could check these first 2 conditions inside unpack_url
        # and save repetition of conditions, but then we would
        # report less-useful error messages for unhashable
        # requirements, complaining that there's no hash provided.
        if req.link.is_vcs:
            raise VcsHashUnsupported()
        if req.link.is_existing_dir():
            raise DirectoryUrlHashUnsupported()

        # Unpinned packages are asking for trouble when a new version
        # is uploaded. This isn't a security check, but it saves users
        # a surprising hash mismatch in the future.
        # file:/// URLs aren't pinnable, so don't complain about them
        # not being pinned.
        if req.original_link is None and not req.is_pinned:
            raise HashUnpinned()

        # If known-good hashes are missing for this requirement,
        # shim it with a facade object that will provoke hash
        # computation and then raise a HashMissing exception
        # showing the user what the hash should be.
        return req.hashes(trust_internet=False) or MissingHashes()

    def _fetch_metadata_using_lazy_wheel(
        self,
        link: Link,
    ) -> Optional[BaseDistribution]:
        """Fetch metadata using lazy wheel, if possible."""
        if not self.use_lazy_wheel:
            return None
        if self.require_hashes:
            logger.debug("Lazy wheel is not used as hash checking is required")
            return None
        if link.is_file or not link.is_wheel:
            logger.debug(
                # BUGFIX: grammar in the log message ("does not points to").
                "Lazy wheel is not used as %r does not point to a remote wheel",
                link,
            )
            return None

        wheel = Wheel(link.filename)
        name = canonicalize_name(wheel.name)
        logger.info(
            "Obtaining dependency information from %s %s",
            name,
            wheel.version,
        )
        url = link.url.split("#", 1)[0]
        try:
            return dist_from_wheel_url(name, url, self._session)
        except HTTPRangeRequestUnsupported:
            logger.debug("%s does not support range requests", url)
            return None

    def _complete_partial_requirements(
        self,
        partially_downloaded_reqs: Iterable[InstallRequirement],
        parallel_builds: bool = False,
    ) -> None:
        """Download any requirements which were only fetched by metadata."""
        # Download to a temporary directory. These will be copied over as
        # needed for downstream 'download', 'wheel', and 'install' commands.
        temp_dir = TempDirectory(kind="unpack", globally_managed=True).path

        # Map each link to the requirement that owns it. This allows us to set
        # `req.local_file_path` on the appropriate requirement after passing
        # all the links at once into BatchDownloader.
        links_to_fully_download: Dict[Link, InstallRequirement] = {}
        for req in partially_downloaded_reqs:
            assert req.link
            links_to_fully_download[req.link] = req

        batch_download = self._batch_download(
            links_to_fully_download.keys(),
            temp_dir,
        )
        for link, (filepath, _) in batch_download:
            logger.debug("Downloading link %s to %s", link, filepath)
            req = links_to_fully_download[link]
            req.local_file_path = filepath

        # This step is necessary to ensure all lazy wheels are processed
        # successfully by the 'download', 'wheel', and 'install' commands.
        for req in partially_downloaded_reqs:
            self._prepare_linked_requirement(req, parallel_builds)

    def prepare_linked_requirement(
        self, req: InstallRequirement, parallel_builds: bool = False
    ) -> BaseDistribution:
        """Prepare a requirement to be obtained from req.link."""
        assert req.link
        link = req.link
        self._log_preparing_link(req)
        with indent_log():
            # Check if the relevant file is already available
            # in the download directory
            file_path = None
            if self.download_dir is not None and link.is_wheel:
                hashes = self._get_linked_req_hashes(req)
                file_path = _check_download_dir(req.link, self.download_dir, hashes)

            if file_path is not None:
                # The file is already available, so mark it as downloaded
                self._downloaded[req.link.url] = file_path
            else:
                # The file is not available, attempt to fetch only metadata
                wheel_dist = self._fetch_metadata_using_lazy_wheel(link)
                if wheel_dist is not None:
                    req.needs_more_preparation = True
                    return wheel_dist

            # None of the optimizations worked, fully prepare the requirement
            return self._prepare_linked_requirement(req, parallel_builds)

    def prepare_linked_requirements_more(
        self, reqs: Iterable[InstallRequirement], parallel_builds: bool = False
    ) -> None:
        """Prepare linked requirements more, if needed."""
        reqs = [req for req in reqs if req.needs_more_preparation]
        for req in reqs:
            # Determine if any of these requirements were already downloaded.
            if self.download_dir is not None and req.link.is_wheel:
                hashes = self._get_linked_req_hashes(req)
                file_path = _check_download_dir(req.link, self.download_dir, hashes)
                if file_path is not None:
                    self._downloaded[req.link.url] = file_path
                    req.needs_more_preparation = False

        # Prepare requirements we found were already downloaded for some
        # reason. The other downloads will be completed separately.
        partially_downloaded_reqs: List[InstallRequirement] = []
        for req in reqs:
            if req.needs_more_preparation:
                partially_downloaded_reqs.append(req)
            else:
                self._prepare_linked_requirement(req, parallel_builds)

        # TODO: separate this part out from RequirementPreparer when the v1
        # resolver can be removed!
        self._complete_partial_requirements(
            partially_downloaded_reqs,
            parallel_builds=parallel_builds,
        )

    def _prepare_linked_requirement(
        self, req: InstallRequirement, parallel_builds: bool
    ) -> BaseDistribution:
        """Fully prepare a linked requirement: unpack/fetch, then build metadata."""
        assert req.link
        link = req.link

        self._ensure_link_req_src_dir(req, parallel_builds)
        hashes = self._get_linked_req_hashes(req)

        if link.is_existing_dir():
            local_file = None
        elif link.url not in self._downloaded:
            try:
                local_file = unpack_url(
                    link,
                    req.source_dir,
                    self._download,
                    self.verbosity,
                    self.download_dir,
                    hashes,
                )
            except NetworkConnectionError as exc:
                raise InstallationError(
                    "Could not install requirement {} because of HTTP "
                    "error {} for URL {}".format(req, exc, link)
                )
        else:
            # Already fetched earlier (e.g. found in the download dir);
            # still verify its hash before trusting it.
            file_path = self._downloaded[link.url]
            if hashes:
                hashes.check_against_path(file_path)
            local_file = File(file_path, content_type=None)

        # For use in later processing,
        # preserve the file path on the requirement.
        if local_file:
            req.local_file_path = local_file.path

        dist = _get_prepared_distribution(
            req,
            self.build_tracker,
            self.finder,
            self.build_isolation,
            self.check_build_deps,
        )
        return dist

    def save_linked_requirement(self, req: InstallRequirement) -> None:
        """Persist the requirement's archive into the download directory."""
        assert self.download_dir is not None
        assert req.link is not None
        link = req.link
        if link.is_vcs or (link.is_existing_dir() and req.editable):
            # Make a .zip of the source_dir we already created.
            req.archive(self.download_dir)
            return

        if link.is_existing_dir():
            logger.debug(
                "Not copying link to destination directory "
                "since it is a directory: %s",
                link,
            )
            return
        if req.local_file_path is None:
            # No distribution was downloaded for this requirement.
            return

        download_location = os.path.join(self.download_dir, link.filename)
        if not os.path.exists(download_location):
            shutil.copy(req.local_file_path, download_location)
            download_path = display_path(download_location)
            logger.info("Saved %s", download_path)

    def prepare_editable_requirement(
        self,
        req: InstallRequirement,
    ) -> BaseDistribution:
        """Prepare an editable requirement."""
        assert req.editable, "cannot prepare a non-editable req as editable"

        logger.info("Obtaining %s", req)

        with indent_log():
            if self.require_hashes:
                raise InstallationError(
                    "The editable requirement {} cannot be installed when "
                    "requiring hashes, because there is no single file to "
                    "hash.".format(req)
                )
            req.ensure_has_source_dir(self.src_dir)
            req.update_editable()

            dist = _get_prepared_distribution(
                req,
                self.build_tracker,
                self.finder,
                self.build_isolation,
                self.check_build_deps,
            )

            req.check_if_exists(self.use_user_site)

        return dist

    def prepare_installed_requirement(
        self,
        req: InstallRequirement,
        skip_reason: str,
    ) -> BaseDistribution:
        """Prepare an already-installed requirement."""
        assert req.satisfied_by, "req should have been satisfied but isn't"
        assert skip_reason is not None, (
            "did not get skip reason skipped but req.satisfied_by "
            "is set to {}".format(req.satisfied_by)
        )
        logger.info(
            "Requirement %s: %s (%s)", skip_reason, req, req.satisfied_by.version
        )
        with indent_log():
            if self.require_hashes:
                logger.debug(
                    "Since it is already installed, we are trusting this "
                    "package without checking its hash. To ensure a "
                    "completely repeatable environment, install into an "
                    "empty virtualenv."
                )
            return InstalledDistribution(req).get_metadata_distribution()