Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/pip/_internal/req/req_install.py: 23%
416 statements
coverage.py v7.4.3, created at 2024-02-26 06:33 +0000
1import functools
2import logging
3import os
4import shutil
5import sys
6import uuid
7import zipfile
8from optparse import Values
9from pathlib import Path
10from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union
12from pip._vendor.packaging.markers import Marker
13from pip._vendor.packaging.requirements import Requirement
14from pip._vendor.packaging.specifiers import SpecifierSet
15from pip._vendor.packaging.utils import canonicalize_name
16from pip._vendor.packaging.version import Version
17from pip._vendor.packaging.version import parse as parse_version
18from pip._vendor.pyproject_hooks import BuildBackendHookCaller
20from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment
21from pip._internal.exceptions import InstallationError, PreviousBuildDirError
22from pip._internal.locations import get_scheme
23from pip._internal.metadata import (
24 BaseDistribution,
25 get_default_environment,
26 get_directory_distribution,
27 get_wheel_distribution,
28)
29from pip._internal.metadata.base import FilesystemWheel
30from pip._internal.models.direct_url import DirectUrl
31from pip._internal.models.link import Link
32from pip._internal.operations.build.metadata import generate_metadata
33from pip._internal.operations.build.metadata_editable import generate_editable_metadata
34from pip._internal.operations.build.metadata_legacy import (
35 generate_metadata as generate_metadata_legacy,
36)
37from pip._internal.operations.install.editable_legacy import (
38 install_editable as install_editable_legacy,
39)
40from pip._internal.operations.install.wheel import install_wheel
41from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
42from pip._internal.req.req_uninstall import UninstallPathSet
43from pip._internal.utils.deprecation import deprecated
44from pip._internal.utils.hashes import Hashes
45from pip._internal.utils.misc import (
46 ConfiguredBuildBackendHookCaller,
47 ask_path_exists,
48 backup_dir,
49 display_path,
50 hide_url,
51 is_installable_dir,
52 redact_auth_from_requirement,
53 redact_auth_from_url,
54)
55from pip._internal.utils.packaging import safe_extra
56from pip._internal.utils.subprocess import runner_with_spinner_message
57from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
58from pip._internal.utils.unpacking import unpack_file
59from pip._internal.utils.virtualenv import running_under_virtualenv
60from pip._internal.vcs import vcs
62logger = logging.getLogger(__name__)
65class InstallRequirement:
66 """
67 Represents something that may be installed later on, may have information
68 about where to fetch the relevant requirement and also contains logic for
69 installing the said requirement.
70 """
72 def __init__(
73 self,
74 req: Optional[Requirement],
75 comes_from: Optional[Union[str, "InstallRequirement"]],
76 editable: bool = False,
77 link: Optional[Link] = None,
78 markers: Optional[Marker] = None,
79 use_pep517: Optional[bool] = None,
80 isolated: bool = False,
81 *,
82 global_options: Optional[List[str]] = None,
83 hash_options: Optional[Dict[str, List[str]]] = None,
84 config_settings: Optional[Dict[str, Union[str, List[str]]]] = None,
85 constraint: bool = False,
86 extras: Collection[str] = (),
87 user_supplied: bool = False,
88 permit_editable_wheels: bool = False,
89 ) -> None:
90 assert req is None or isinstance(req, Requirement), req
91 self.req = req
92 self.comes_from = comes_from
93 self.constraint = constraint
94 self.editable = editable
95 self.permit_editable_wheels = permit_editable_wheels
97 # source_dir is the local directory where the linked requirement is
98 # located, or unpacked. In case unpacking is needed, creating and
99 # populating source_dir is done by the RequirementPreparer. Note this
100 # is not necessarily the directory where pyproject.toml or setup.py is
101 # located - that one is obtained via unpacked_source_directory.
102 self.source_dir: Optional[str] = None
103 if self.editable:
104 assert link
105 if link.is_file:
106 self.source_dir = os.path.normpath(os.path.abspath(link.file_path))
108 # original_link is the direct URL that was provided by the user for the
109 # requirement, either directly or via a constraints file.
110 if link is None and req and req.url:
111 # PEP 508 URL requirement
112 link = Link(req.url)
113 self.link = self.original_link = link
115 # When this InstallRequirement is a wheel obtained from the cache of locally
116 # built wheels, this is the source link corresponding to the cache entry, which
117 # was used to download and build the cached wheel.
118 self.cached_wheel_source_link: Optional[Link] = None
120 # Information about the location of the artifact that was downloaded. This
121 # property is guaranteed to be set in resolver results.
122 self.download_info: Optional[DirectUrl] = None
124 # Path to any downloaded or already-existing package.
125 self.local_file_path: Optional[str] = None
126 if self.link and self.link.is_file:
127 self.local_file_path = self.link.file_path
129 if extras:
130 self.extras = extras
131 elif req:
132 self.extras = req.extras
133 else:
134 self.extras = set()
135 if markers is None and req:
136 markers = req.marker
137 self.markers = markers
139 # This holds the Distribution object if this requirement is already installed.
140 self.satisfied_by: Optional[BaseDistribution] = None
141 # Whether the installation process should try to uninstall an existing
142 # distribution before installing this requirement.
143 self.should_reinstall = False
144 # Temporary build location
145 self._temp_build_dir: Optional[TempDirectory] = None
146 # Set to True after successful installation
147 self.install_succeeded: Optional[bool] = None
148 # Supplied options
149 self.global_options = global_options if global_options else []
150 self.hash_options = hash_options if hash_options else {}
151 self.config_settings = config_settings
152 # Set to True after successful preparation of this requirement
153 self.prepared = False
154 # User supplied requirements are explicitly requested for installation
155 # by the user via CLI arguments or requirements files, as opposed to,
156 # e.g. dependencies, extras or constraints.
157 self.user_supplied = user_supplied
159 self.isolated = isolated
160 self.build_env: BuildEnvironment = NoOpBuildEnvironment()
162 # For PEP 517, the directory where we request the project metadata
163 # gets stored. We need this to pass to build_wheel, so the backend
164 # can ensure that the wheel matches the metadata (see the PEP for
165 # details).
166 self.metadata_directory: Optional[str] = None
168 # The static build requirements (from pyproject.toml)
169 self.pyproject_requires: Optional[List[str]] = None
171 # Build requirements that we will check are available
172 self.requirements_to_check: List[str] = []
174 # The PEP 517 backend we should use to build the project
175 self.pep517_backend: Optional[BuildBackendHookCaller] = None
177 # Are we using PEP 517 for this requirement?
178 # After pyproject.toml has been loaded, the only valid values are True
179 # and False. Before loading, None is valid (meaning "use the default").
180 # Setting an explicit value before loading pyproject.toml is supported,
181 # but after loading this flag should be treated as read only.
182 self.use_pep517 = use_pep517
184 # If config settings are provided, enforce PEP 517.
185 if self.config_settings:
186 if self.use_pep517 is False:
187 logger.warning(
188 "--no-use-pep517 ignored for %s "
189 "because --config-settings are specified.",
190 self,
191 )
192 self.use_pep517 = True
194 # This requirement needs more preparation before it can be built
195 self.needs_more_preparation = False
197 # This requirement needs to be unpacked before it can be installed.
198 self._archive_source: Optional[Path] = None
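# [Editor's note] Illustrative sketch, not part of pip's source. A minimal
# example of how this constructor is typically fed (pip itself builds these
# objects via pip._internal.req.constructors); the package name is hypothetical:
#
#   from pip._vendor.packaging.requirements import Requirement
#   ireq = InstallRequirement(Requirement("requests==2.31.0"), comes_from=None,
#                             user_supplied=True)
#   # Passing req=None plus a Link is how unnamed archive/directory requirements
#   # start out; self.req is filled in later from generated metadata.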
200 def __str__(self) -> str:
201 if self.req:
202 s = redact_auth_from_requirement(self.req)
203 if self.link:
204 s += f" from {redact_auth_from_url(self.link.url)}"
205 elif self.link:
206 s = redact_auth_from_url(self.link.url)
207 else:
208 s = "<InstallRequirement>"
209 if self.satisfied_by is not None:
210 if self.satisfied_by.location is not None:
211 location = display_path(self.satisfied_by.location)
212 else:
213 location = "<memory>"
214 s += f" in {location}"
215 if self.comes_from:
216 if isinstance(self.comes_from, str):
217 comes_from: Optional[str] = self.comes_from
218 else:
219 comes_from = self.comes_from.from_path()
220 if comes_from:
221 s += f" (from {comes_from})"
222 return s
224 def __repr__(self) -> str:
225 return "<{} object: {} editable={!r}>".format(
226 self.__class__.__name__, str(self), self.editable
227 )
229 def format_debug(self) -> str:
230 """An un-tested helper for getting state, for debugging."""
231 attributes = vars(self)
232 names = sorted(attributes)
234 state = (f"{attr}={attributes[attr]!r}" for attr in sorted(names))
235 return "<{name} object: {{{state}}}>".format(
236 name=self.__class__.__name__,
237 state=", ".join(state),
238 )
240 # Things that are valid for all kinds of requirements?
241 @property
242 def name(self) -> Optional[str]:
243 if self.req is None:
244 return None
245 return self.req.name
247 @functools.cached_property
248 def supports_pyproject_editable(self) -> bool:
249 if not self.use_pep517:
250 return False
251 assert self.pep517_backend
252 with self.build_env:
253 runner = runner_with_spinner_message(
254 "Checking if build backend supports build_editable"
255 )
256 with self.pep517_backend.subprocess_runner(runner):
257 return "build_editable" in self.pep517_backend._supported_features()
259 @property
260 def specifier(self) -> SpecifierSet:
261 assert self.req is not None
262 return self.req.specifier
264 @property
265 def is_direct(self) -> bool:
266 """Whether this requirement was specified as a direct URL."""
267 return self.original_link is not None
269 @property
270 def is_pinned(self) -> bool:
271 """Return whether I am pinned to an exact version.
273 For example, some-package==1.2 is pinned; some-package>1.2 is not.
274 """
275 assert self.req is not None
276 specifiers = self.req.specifier
277 return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="}
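# [Editor's note] Illustrative sketch, not part of pip's source. The pinning
# check above requires exactly one specifier whose operator is "==" or "===";
# "pkg" is a hypothetical name:
#
#   Requirement("pkg==1.2").specifier         -> is_pinned would be True
#   Requirement("pkg>=1.2").specifier         -> False (operator is not "==")
#   Requirement("pkg==1.2,!=1.2.1").specifier -> False (more than one specifier)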
279 def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool:
280 if not extras_requested:
281 # Provide an extra to safely evaluate the markers
282 # without matching any extra
283 extras_requested = ("",)
284 if self.markers is not None:
285 return any(
286 self.markers.evaluate({"extra": extra})
287 # TODO: Remove these two variants when packaging is upgraded to
288 # support the marker comparison logic specified in PEP 685.
289 or self.markers.evaluate({"extra": safe_extra(extra)})
290 or self.markers.evaluate({"extra": canonicalize_name(extra)})
291 for extra in extras_requested
292 )
293 else:
294 return True
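# [Editor's note] Illustrative sketch, not part of pip's source, of the marker
# evaluation above; the marker string is hypothetical:
#
#   markers = Marker('python_version >= "3.8" and extra == "socks"')
#   # With extras_requested=["socks"] the marker evaluates to True (on 3.8+);
#   # with no extras requested, the fallback extra "" makes it evaluate to False.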
296 @property
297 def has_hash_options(self) -> bool:
298 """Return whether any known-good hashes are specified as options.
300 These activate --require-hashes mode; hashes specified as part of a
301 URL do not.
303 """
304 return bool(self.hash_options)
306 def hashes(self, trust_internet: bool = True) -> Hashes:
307 """Return a hash-comparer that considers my option- and URL-based
308 hashes to be known-good.
310 Hashes in URLs--ones embedded in the requirements file, not ones
311 downloaded from an index server--are almost peers with ones from
312 flags. They satisfy --require-hashes (whether it was implicitly or
313 explicitly activated) but do not activate it. md5 and sha224 are not
314 allowed in flags, which should nudge people toward good algos. We
315 always OR all hashes together, even ones from URLs.
317 :param trust_internet: Whether to trust URL-based (#md5=...) hashes
318 downloaded from the internet, as by populate_link()
320 """
321 good_hashes = self.hash_options.copy()
322 if trust_internet:
323 link = self.link
324 elif self.is_direct and self.user_supplied:
325 link = self.original_link
326 else:
327 link = None
328 if link and link.hash:
329 assert link.hash_name is not None
330 good_hashes.setdefault(link.hash_name, []).append(link.hash)
331 return Hashes(good_hashes)
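# [Editor's note] Illustrative sketch, not part of pip's source. Option-based
# and URL-based hashes are merged into one Hashes comparer; digests below are
# placeholders:
#
#   hash_options = {"sha256": ["<digest-a>"]}
#   # A link ending in "#sha256=<digest-b>" contributes a second entry, so
#   # hashes(trust_internet=True) accepts either digest-a or digest-b.
#   # With trust_internet=False the URL hash only counts for direct,
#   # user-supplied requirements.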
333 def from_path(self) -> Optional[str]:
334 """Format a nice indicator to show where this "comes from" """
335 if self.req is None:
336 return None
337 s = str(self.req)
338 if self.comes_from:
339 comes_from: Optional[str]
340 if isinstance(self.comes_from, str):
341 comes_from = self.comes_from
342 else:
343 comes_from = self.comes_from.from_path()
344 if comes_from:
345 s += "->" + comes_from
346 return s
348 def ensure_build_location(
349 self, build_dir: str, autodelete: bool, parallel_builds: bool
350 ) -> str:
351 assert build_dir is not None
352 if self._temp_build_dir is not None:
353 assert self._temp_build_dir.path
354 return self._temp_build_dir.path
355 if self.req is None:
356 # Some systems have /tmp as a symlink which confuses custom
357 # builds (such as numpy). Thus, we ensure that the real path
358 # is returned.
359 self._temp_build_dir = TempDirectory(
360 kind=tempdir_kinds.REQ_BUILD, globally_managed=True
361 )
363 return self._temp_build_dir.path
365 # This is the only remaining place where we manually determine the path
366 # for the temporary directory. It is only needed for editables where
367 # it is the value of the --src option.
369 # When parallel builds are enabled, add a UUID to the build directory
370 # name so multiple builds do not interfere with each other.
371 dir_name: str = canonicalize_name(self.req.name)
372 if parallel_builds:
373 dir_name = f"{dir_name}_{uuid.uuid4().hex}"
375 # FIXME: Is there a better place to create the build_dir? (hg and bzr
376 # need this)
377 if not os.path.exists(build_dir):
378 logger.debug("Creating directory %s", build_dir)
379 os.makedirs(build_dir)
380 actual_build_dir = os.path.join(build_dir, dir_name)
381 # `None` indicates that we respect the globally-configured deletion
382 # settings, which is what we actually want when auto-deleting.
383 delete_arg = None if autodelete else False
384 return TempDirectory(
385 path=actual_build_dir,
386 delete=delete_arg,
387 kind=tempdir_kinds.REQ_BUILD,
388 globally_managed=True,
389 ).path
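# [Editor's note] Illustrative sketch, not part of pip's source. For a named
# requirement the build location is derived from the canonical name, with a
# UUID suffix only when parallel builds are enabled (paths are hypothetical):
#
#   ensure_build_location("/build", autodelete=True, parallel_builds=False)
#     -> "/build/my-pkg"
#   ensure_build_location("/build", autodelete=True, parallel_builds=True)
#     -> "/build/my-pkg_<uuid4-hex>"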
391 def _set_requirement(self) -> None:
392 """Set requirement after generating metadata."""
393 assert self.req is None
394 assert self.metadata is not None
395 assert self.source_dir is not None
397 # Construct a Requirement object from the generated metadata
398 if isinstance(parse_version(self.metadata["Version"]), Version):
399 op = "=="
400 else:
401 op = "==="
403 self.req = Requirement(
404 "".join(
405 [
406 self.metadata["Name"],
407 op,
408 self.metadata["Version"],
409 ]
410 )
411 )
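# [Editor's note] Illustrative sketch, not part of pip's source. The operator
# chosen above depends on whether the generated version parses as a standard
# Version; names and versions are hypothetical:
#
#   metadata Name="my-pkg", Version="1.0" -> self.req becomes "my-pkg==1.0"
#   a version that does not parse as a PEP 440 Version would instead use the
#   arbitrary-equality operator, e.g. "my-pkg===<that-version>"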
413 def warn_on_mismatching_name(self) -> None:
414 assert self.req is not None
415 metadata_name = canonicalize_name(self.metadata["Name"])
416 if canonicalize_name(self.req.name) == metadata_name:
417 # Everything is fine.
418 return
420 # If we're here, there's a mismatch. Log a warning about it.
421 logger.warning(
422 "Generating metadata for package %s "
423 "produced metadata for project name %s. Fix your "
424 "#egg=%s fragments.",
425 self.name,
426 metadata_name,
427 self.name,
428 )
429 self.req = Requirement(metadata_name)
431 def check_if_exists(self, use_user_site: bool) -> None:
432 """Find an installed distribution that satisfies or conflicts
433 with this requirement, and set self.satisfied_by or
434 self.should_reinstall appropriately.
435 """
436 if self.req is None:
437 return
438 existing_dist = get_default_environment().get_distribution(self.req.name)
439 if not existing_dist:
440 return
442 version_compatible = self.req.specifier.contains(
443 existing_dist.version,
444 prereleases=True,
445 )
446 if not version_compatible:
447 self.satisfied_by = None
448 if use_user_site:
449 if existing_dist.in_usersite:
450 self.should_reinstall = True
451 elif running_under_virtualenv() and existing_dist.in_site_packages:
452 raise InstallationError(
453 f"Will not install to the user site because it will "
454 f"lack sys.path precedence to {existing_dist.raw_name} "
455 f"in {existing_dist.location}"
456 )
457 else:
458 self.should_reinstall = True
459 else:
460 if self.editable:
461 self.should_reinstall = True
462 # when installing editables, nothing pre-existing should ever
463 # satisfy
464 self.satisfied_by = None
465 else:
466 self.satisfied_by = existing_dist
468 # Things valid for wheels
469 @property
470 def is_wheel(self) -> bool:
471 if not self.link:
472 return False
473 return self.link.is_wheel
475 @property
476 def is_wheel_from_cache(self) -> bool:
477 # When True, it means that this InstallRequirement is a local wheel file in the
478 # cache of locally built wheels.
479 return self.cached_wheel_source_link is not None
481 # Things valid for sdists
482 @property
483 def unpacked_source_directory(self) -> str:
484 assert self.source_dir, f"No source dir for {self}"
485 return os.path.join(
486 self.source_dir, self.link and self.link.subdirectory_fragment or ""
487 )
489 @property
490 def setup_py_path(self) -> str:
491 assert self.source_dir, f"No source dir for {self}"
492 setup_py = os.path.join(self.unpacked_source_directory, "setup.py")
494 return setup_py
496 @property
497 def setup_cfg_path(self) -> str:
498 assert self.source_dir, f"No source dir for {self}"
499 setup_cfg = os.path.join(self.unpacked_source_directory, "setup.cfg")
501 return setup_cfg
503 @property
504 def pyproject_toml_path(self) -> str:
505 assert self.source_dir, f"No source dir for {self}"
506 return make_pyproject_path(self.unpacked_source_directory)
508 def load_pyproject_toml(self) -> None:
509 """Load the pyproject.toml file.
511 After calling this routine, all of the attributes related to PEP 517
512 processing for this requirement have been set. In particular, the
513 use_pep517 attribute can be used to determine whether we should
514 follow the PEP 517 or legacy (setup.py) code path.
515 """
516 pyproject_toml_data = load_pyproject_toml(
517 self.use_pep517, self.pyproject_toml_path, self.setup_py_path, str(self)
518 )
520 if pyproject_toml_data is None:
521 assert not self.config_settings
522 self.use_pep517 = False
523 return
525 self.use_pep517 = True
526 requires, backend, check, backend_path = pyproject_toml_data
527 self.requirements_to_check = check
528 self.pyproject_requires = requires
529 self.pep517_backend = ConfiguredBuildBackendHookCaller(
530 self,
531 self.unpacked_source_directory,
532 backend,
533 backend_path=backend_path,
534 )
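# [Editor's note] Illustrative sketch, not part of pip's source. Outcomes of
# the call above, assuming use_pep517 started as None:
#
#   project with a pyproject.toml [build-system] table -> use_pep517=True,
#       pyproject_requires and pep517_backend populated
#   plain setup.py project, no pyproject.toml -> pyproject_toml_data is None,
#       use_pep517=False (legacy setup.py code path)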
536 def isolated_editable_sanity_check(self) -> None:
537 """Check that an editable requirement if valid for use with PEP 517/518.
539 This verifies that an editable that has a pyproject.toml either supports PEP 660
540 or as a setup.py or a setup.cfg
541 """
542 if (
543 self.editable
544 and self.use_pep517
545 and not self.supports_pyproject_editable
546 and not os.path.isfile(self.setup_py_path)
547 and not os.path.isfile(self.setup_cfg_path)
548 ):
549 raise InstallationError(
550 f"Project {self} has a 'pyproject.toml' and its build "
551 f"backend is missing the 'build_editable' hook. Since it does not "
552 f"have a 'setup.py' nor a 'setup.cfg', "
553 f"it cannot be installed in editable mode. "
554 f"Consider using a build backend that supports PEP 660."
555 )
557 def prepare_metadata(self) -> None:
558 """Ensure that project metadata is available.
560 Under PEP 517 and PEP 660, call the backend hook to prepare the metadata.
561 Under legacy processing, call setup.py egg-info.
562 """
563 assert self.source_dir, f"No source dir for {self}"
564 details = self.name or f"from {self.link}"
566 if self.use_pep517:
567 assert self.pep517_backend is not None
568 if (
569 self.editable
570 and self.permit_editable_wheels
571 and self.supports_pyproject_editable
572 ):
573 self.metadata_directory = generate_editable_metadata(
574 build_env=self.build_env,
575 backend=self.pep517_backend,
576 details=details,
577 )
578 else:
579 self.metadata_directory = generate_metadata(
580 build_env=self.build_env,
581 backend=self.pep517_backend,
582 details=details,
583 )
584 else:
585 self.metadata_directory = generate_metadata_legacy(
586 build_env=self.build_env,
587 setup_py_path=self.setup_py_path,
588 source_dir=self.unpacked_source_directory,
589 isolated=self.isolated,
590 details=details,
591 )
593 # Act on the newly generated metadata, based on the name and version.
594 if not self.name:
595 self._set_requirement()
596 else:
597 self.warn_on_mismatching_name()
599 self.assert_source_matches_version()
601 @property
602 def metadata(self) -> Any:
603 if not hasattr(self, "_metadata"):
604 self._metadata = self.get_dist().metadata
606 return self._metadata
608 def get_dist(self) -> BaseDistribution:
609 if self.metadata_directory:
610 return get_directory_distribution(self.metadata_directory)
611 elif self.local_file_path and self.is_wheel:
612 assert self.req is not None
613 return get_wheel_distribution(
614 FilesystemWheel(self.local_file_path),
615 canonicalize_name(self.req.name),
616 )
617 raise AssertionError(
618 f"InstallRequirement {self} has no metadata directory and no wheel: "
619 f"can't make a distribution."
620 )
622 def assert_source_matches_version(self) -> None:
623 assert self.source_dir, f"No source dir for {self}"
624 version = self.metadata["version"]
625 if self.req and self.req.specifier and version not in self.req.specifier:
626 logger.warning(
627 "Requested %s, but installing version %s",
628 self,
629 version,
630 )
631 else:
632 logger.debug(
633 "Source in %s has version %s, which satisfies requirement %s",
634 display_path(self.source_dir),
635 version,
636 self,
637 )
639 # For both source distributions and editables
640 def ensure_has_source_dir(
641 self,
642 parent_dir: str,
643 autodelete: bool = False,
644 parallel_builds: bool = False,
645 ) -> None:
646 """Ensure that a source_dir is set.
648 This will create a temporary build dir if the name of the requirement
649 isn't known yet.
651 :param parent_dir: The ideal pip parent_dir for the source_dir.
652 Generally src_dir for editables and build_dir for sdists.
653 :return: self.source_dir
654 """
655 if self.source_dir is None:
656 self.source_dir = self.ensure_build_location(
657 parent_dir,
658 autodelete=autodelete,
659 parallel_builds=parallel_builds,
660 )
662 def needs_unpacked_archive(self, archive_source: Path) -> None:
663 assert self._archive_source is None
664 self._archive_source = archive_source
666 def ensure_pristine_source_checkout(self) -> None:
667 """Ensure the source directory has not yet been built in."""
668 assert self.source_dir is not None
669 if self._archive_source is not None:
670 unpack_file(str(self._archive_source), self.source_dir)
671 elif is_installable_dir(self.source_dir):
672 # If a checkout exists, it's unwise to keep going.
673 # version inconsistencies are logged later, but do not fail
674 # the installation.
675 raise PreviousBuildDirError(
676 f"pip can't proceed with requirements '{self}' due to a "
677 f"pre-existing build directory ({self.source_dir}). This is likely "
678 "due to a previous installation that failed . pip is "
679 "being responsible and not assuming it can delete this. "
680 "Please delete it and try again."
681 )
683 # For editable installations
684 def update_editable(self) -> None:
685 if not self.link:
686 logger.debug(
687 "Cannot update repository at %s; repository location is unknown",
688 self.source_dir,
689 )
690 return
691 assert self.editable
692 assert self.source_dir
693 if self.link.scheme == "file":
694 # Static paths don't get updated
695 return
696 vcs_backend = vcs.get_backend_for_scheme(self.link.scheme)
697 # Editable requirements are validated in Requirement constructors.
698 # So here, if it's neither a path nor a valid VCS URL, it's a bug.
699 assert vcs_backend, f"Unsupported VCS URL {self.link.url}"
700 hidden_url = hide_url(self.link.url)
701 vcs_backend.obtain(self.source_dir, url=hidden_url, verbosity=0)
703 # Top-level Actions
704 def uninstall(
705 self, auto_confirm: bool = False, verbose: bool = False
706 ) -> Optional[UninstallPathSet]:
707 """
708 Uninstall the distribution currently satisfying this requirement.
710 Prompts before removing or modifying files unless
711 ``auto_confirm`` is True.
713 Refuses to delete or modify files outside of ``sys.prefix`` -
714 thus uninstallation within a virtual environment can only
715 modify that virtual environment, even if the virtualenv is
716 linked to global site-packages.
718 """
719 assert self.req
720 dist = get_default_environment().get_distribution(self.req.name)
721 if not dist:
722 logger.warning("Skipping %s as it is not installed.", self.name)
723 return None
724 logger.info("Found existing installation: %s", dist)
726 uninstalled_pathset = UninstallPathSet.from_dist(dist)
727 uninstalled_pathset.remove(auto_confirm, verbose)
728 return uninstalled_pathset
730 def _get_archive_name(self, path: str, parentdir: str, rootdir: str) -> str:
731 def _clean_zip_name(name: str, prefix: str) -> str:
732 assert name.startswith(
733 prefix + os.path.sep
734 ), f"name {name!r} doesn't start with prefix {prefix!r}"
735 name = name[len(prefix) + 1 :]
736 name = name.replace(os.path.sep, "/")
737 return name
739 assert self.req is not None
740 path = os.path.join(parentdir, path)
741 name = _clean_zip_name(path, rootdir)
742 return self.req.name + "/" + name
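# [Editor's note] Illustrative sketch, not part of pip's source; paths are
# hypothetical. _clean_zip_name() strips the root prefix and normalises path
# separators, and the project name is prepended to form the archive member name:
#
#   _clean_zip_name("/tmp/src/pkg/mod.py", "/tmp/src")  -> "pkg/mod.py"
#   _get_archive_name("mod.py", parentdir="/tmp/src/pkg", rootdir="/tmp/src")
#     -> "<req-name>/pkg/mod.py"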
744 def archive(self, build_dir: Optional[str]) -> None:
745 """Saves archive to provided build_dir.
747 Used for saving downloaded VCS requirements as part of `pip download`.
748 """
749 assert self.source_dir
750 if build_dir is None:
751 return
753 create_archive = True
754 archive_name = "{}-{}.zip".format(self.name, self.metadata["version"])
755 archive_path = os.path.join(build_dir, archive_name)
757 if os.path.exists(archive_path):
758 response = ask_path_exists(
759 f"The file {display_path(archive_path)} exists. (i)gnore, (w)ipe, "
760 "(b)ackup, (a)bort ",
761 ("i", "w", "b", "a"),
762 )
763 if response == "i":
764 create_archive = False
765 elif response == "w":
766 logger.warning("Deleting %s", display_path(archive_path))
767 os.remove(archive_path)
768 elif response == "b":
769 dest_file = backup_dir(archive_path)
770 logger.warning(
771 "Backing up %s to %s",
772 display_path(archive_path),
773 display_path(dest_file),
774 )
775 shutil.move(archive_path, dest_file)
776 elif response == "a":
777 sys.exit(-1)
779 if not create_archive:
780 return
782 zip_output = zipfile.ZipFile(
783 archive_path,
784 "w",
785 zipfile.ZIP_DEFLATED,
786 allowZip64=True,
787 )
788 with zip_output:
789 dir = os.path.normcase(os.path.abspath(self.unpacked_source_directory))
790 for dirpath, dirnames, filenames in os.walk(dir):
791 for dirname in dirnames:
792 dir_arcname = self._get_archive_name(
793 dirname,
794 parentdir=dirpath,
795 rootdir=dir,
796 )
797 zipdir = zipfile.ZipInfo(dir_arcname + "/")
798 zipdir.external_attr = 0x1ED << 16 # 0o755
799 zip_output.writestr(zipdir, "")
800 for filename in filenames:
801 file_arcname = self._get_archive_name(
802 filename,
803 parentdir=dirpath,
804 rootdir=dir,
805 )
806 filename = os.path.join(dirpath, filename)
807 zip_output.write(filename, file_arcname)
809 logger.info("Saved %s", display_path(archive_path))
811 def install(
812 self,
813 global_options: Optional[Sequence[str]] = None,
814 root: Optional[str] = None,
815 home: Optional[str] = None,
816 prefix: Optional[str] = None,
817 warn_script_location: bool = True,
818 use_user_site: bool = False,
819 pycompile: bool = True,
820 ) -> None:
821 assert self.req is not None
822 scheme = get_scheme(
823 self.req.name,
824 user=use_user_site,
825 home=home,
826 root=root,
827 isolated=self.isolated,
828 prefix=prefix,
829 )
831 if self.editable and not self.is_wheel:
832 if self.config_settings:
833 logger.warning(
834 "--config-settings ignored for legacy editable install of %s. "
835 "Consider upgrading to a version of setuptools "
836 "that supports PEP 660 (>= 64).",
837 self,
838 )
839 install_editable_legacy(
840 global_options=global_options if global_options is not None else [],
841 prefix=prefix,
842 home=home,
843 use_user_site=use_user_site,
844 name=self.req.name,
845 setup_py_path=self.setup_py_path,
846 isolated=self.isolated,
847 build_env=self.build_env,
848 unpacked_source_directory=self.unpacked_source_directory,
849 )
850 self.install_succeeded = True
851 return
853 assert self.is_wheel
854 assert self.local_file_path
856 install_wheel(
857 self.req.name,
858 self.local_file_path,
859 scheme=scheme,
860 req_description=str(self.req),
861 pycompile=pycompile,
862 warn_script_location=warn_script_location,
863 direct_url=self.download_info if self.is_direct else None,
864 requested=self.user_supplied,
865 )
866 self.install_succeeded = True
869def check_invalid_constraint_type(req: InstallRequirement) -> str:
870 # Check for unsupported forms
871 problem = ""
872 if not req.name:
873 problem = "Unnamed requirements are not allowed as constraints"
874 elif req.editable:
875 problem = "Editable requirements are not allowed as constraints"
876 elif req.extras:
877 problem = "Constraints cannot have extras"
879 if problem:
880 deprecated(
881 reason=(
882 "Constraints are only allowed to take the form of a package "
883 "name and a version specifier. Other forms were originally "
884 "permitted as an accident of the implementation, but were "
885 "undocumented. The new implementation of the resolver no "
886 "longer supports these forms."
887 ),
888 replacement="replacing the constraint with a requirement",
889 # No plan yet for when the new resolver becomes default
890 gone_in=None,
891 issue=8210,
892 )
894 return problem
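# [Editor's note] Illustrative sketch, not part of pip's source. Constraint
# forms and the problem strings returned above (requirement strings are
# hypothetical):
#
#   "pkg>=1.0,<2" -> ""  (allowed: name plus version specifier)
#   "pkg[extra]"  -> "Constraints cannot have extras"
#   "-e ./local"  -> "Editable requirements are not allowed as constraints"
#   "./local.zip" -> "Unnamed requirements are not allowed as constraints"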
897def _has_option(options: Values, reqs: List[InstallRequirement], option: str) -> bool:
898 if getattr(options, option, None):
899 return True
900 for req in reqs:
901 if getattr(req, option, None):
902 return True
903 return False
906def check_legacy_setup_py_options(
907 options: Values,
908 reqs: List[InstallRequirement],
909) -> None:
910 has_build_options = _has_option(options, reqs, "build_options")
911 has_global_options = _has_option(options, reqs, "global_options")
912 if has_build_options or has_global_options:
913 deprecated(
914 reason="--build-option and --global-option are deprecated.",
915 issue=11859,
916 replacement="to use --config-settings",
917 gone_in="24.2",
918 )
919 logger.warning(
920 "Implying --no-binary=:all: due to the presence of "
921 "--build-option / --global-option. "
922 )
923 options.format_control.disallow_binaries()