Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/pip/_internal/req/req_install.py: 23%

391 statements  

coverage.py v7.2.7, created at 2023-06-07 06:48 +0000

1# The following comment should be removed at some point in the future. 

2# mypy: strict-optional=False 

3 

4import functools 

5import logging 

6import os 

7import shutil 

8import sys 

9import uuid 

10import zipfile 

11from optparse import Values 

12from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union 

13 

14from pip._vendor.packaging.markers import Marker 

15from pip._vendor.packaging.requirements import Requirement 

16from pip._vendor.packaging.specifiers import SpecifierSet 

17from pip._vendor.packaging.utils import canonicalize_name 

18from pip._vendor.packaging.version import Version 

19from pip._vendor.packaging.version import parse as parse_version 

20from pip._vendor.pyproject_hooks import BuildBackendHookCaller 

21 

22from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment 

23from pip._internal.exceptions import InstallationError 

24from pip._internal.locations import get_scheme 

25from pip._internal.metadata import ( 

26 BaseDistribution, 

27 get_default_environment, 

28 get_directory_distribution, 

29 get_wheel_distribution, 

30) 

31from pip._internal.metadata.base import FilesystemWheel 

32from pip._internal.models.direct_url import DirectUrl 

33from pip._internal.models.link import Link 

34from pip._internal.operations.build.metadata import generate_metadata 

35from pip._internal.operations.build.metadata_editable import generate_editable_metadata 

36from pip._internal.operations.build.metadata_legacy import ( 

37 generate_metadata as generate_metadata_legacy, 

38) 

39from pip._internal.operations.install.editable_legacy import ( 

40 install_editable as install_editable_legacy, 

41) 

42from pip._internal.operations.install.wheel import install_wheel 

43from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path 

44from pip._internal.req.req_uninstall import UninstallPathSet 

45from pip._internal.utils.deprecation import deprecated 

46from pip._internal.utils.hashes import Hashes 

47from pip._internal.utils.misc import ( 

48 ConfiguredBuildBackendHookCaller, 

49 ask_path_exists, 

50 backup_dir, 

51 display_path, 

52 hide_url, 

53 redact_auth_from_url, 

54) 

55from pip._internal.utils.packaging import safe_extra 

56from pip._internal.utils.subprocess import runner_with_spinner_message 

57from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds 

58from pip._internal.utils.virtualenv import running_under_virtualenv 

59from pip._internal.vcs import vcs 

60 

61logger = logging.getLogger(__name__) 

62 

63 

64class InstallRequirement: 

65 """ 

66 Represents something that may be installed later on, may have information 

67 about where to fetch the relevant requirement and also contains logic for 

68 installing the said requirement. 

69 """ 

70 

71 def __init__( 

72 self, 

73 req: Optional[Requirement], 

74 comes_from: Optional[Union[str, "InstallRequirement"]], 

75 editable: bool = False, 

76 link: Optional[Link] = None, 

77 markers: Optional[Marker] = None, 

78 use_pep517: Optional[bool] = None, 

79 isolated: bool = False, 

80 *, 

81 global_options: Optional[List[str]] = None, 

82 hash_options: Optional[Dict[str, List[str]]] = None, 

83 config_settings: Optional[Dict[str, Union[str, List[str]]]] = None, 

84 constraint: bool = False, 

85 extras: Collection[str] = (), 

86 user_supplied: bool = False, 

87 permit_editable_wheels: bool = False, 

88 ) -> None: 

89 assert req is None or isinstance(req, Requirement), req 

90 self.req = req 

91 self.comes_from = comes_from 

92 self.constraint = constraint 

93 self.editable = editable 

94 self.permit_editable_wheels = permit_editable_wheels 

95 

96 # source_dir is the local directory where the linked requirement is 

97 # located, or unpacked. In case unpacking is needed, creating and 

98 # populating source_dir is done by the RequirementPreparer. Note this 

99 # is not necessarily the directory where pyproject.toml or setup.py is 

100 # located - that one is obtained via unpacked_source_directory. 

101 self.source_dir: Optional[str] = None 

102 if self.editable: 

103 assert link 

104 if link.is_file: 

105 self.source_dir = os.path.normpath(os.path.abspath(link.file_path)) 

106 

107 # original_link is the direct URL that was provided by the user for the 

108 # requirement, either directly or via a constraints file. 

109 if link is None and req and req.url: 

110 # PEP 508 URL requirement 

111 link = Link(req.url) 

112 self.link = self.original_link = link 

113 

114 # When this InstallRequirement is a wheel obtained from the cache of locally 

115 # built wheels, this is the source link corresponding to the cache entry, which 

116 # was used to download and build the cached wheel. 

117 self.cached_wheel_source_link: Optional[Link] = None 

118 

119 # Information about the location of the artifact that was downloaded. This 

120 # property is guaranteed to be set in resolver results. 

121 self.download_info: Optional[DirectUrl] = None 

122 

123 # Path to any downloaded or already-existing package. 

124 self.local_file_path: Optional[str] = None 

125 if self.link and self.link.is_file: 

126 self.local_file_path = self.link.file_path 

127 

128 if extras: 

129 self.extras = extras 

130 elif req: 

131 self.extras = {safe_extra(extra) for extra in req.extras} 

132 else: 

133 self.extras = set() 

134 if markers is None and req: 

135 markers = req.marker 

136 self.markers = markers 

137 

138 # This holds the Distribution object if this requirement is already installed. 

139 self.satisfied_by: Optional[BaseDistribution] = None 

140 # Whether the installation process should try to uninstall an existing 

141 # distribution before installing this requirement. 

142 self.should_reinstall = False 

143 # Temporary build location 

144 self._temp_build_dir: Optional[TempDirectory] = None 

145 # Set to True after successful installation 

146 self.install_succeeded: Optional[bool] = None 

147 # Supplied options 

148 self.global_options = global_options if global_options else [] 

149 self.hash_options = hash_options if hash_options else {} 

150 self.config_settings = config_settings 

151 # Set to True after successful preparation of this requirement 

152 self.prepared = False 

153 # User supplied requirements are explicitly requested for installation 

154 # by the user via CLI arguments or requirements files, as opposed to, 

155 # e.g. dependencies, extras or constraints. 

156 self.user_supplied = user_supplied 

157 

158 self.isolated = isolated 

159 self.build_env: BuildEnvironment = NoOpBuildEnvironment() 

160 

161 # For PEP 517, the directory where we request the project metadata 

162 # gets stored. We need this to pass to build_wheel, so the backend 

163 # can ensure that the wheel matches the metadata (see the PEP for 

164 # details). 

165 self.metadata_directory: Optional[str] = None 

166 

167 # The static build requirements (from pyproject.toml) 

168 self.pyproject_requires: Optional[List[str]] = None 

169 

170 # Build requirements that we will check are available 

171 self.requirements_to_check: List[str] = [] 

172 

173 # The PEP 517 backend we should use to build the project 

174 self.pep517_backend: Optional[BuildBackendHookCaller] = None 

175 

176 # Are we using PEP 517 for this requirement? 

177 # After pyproject.toml has been loaded, the only valid values are True 

178 # and False. Before loading, None is valid (meaning "use the default"). 

179 # Setting an explicit value before loading pyproject.toml is supported, 

180 # but after loading this flag should be treated as read only. 

181 self.use_pep517 = use_pep517 

182 

183 # This requirement needs more preparation before it can be built 

184 self.needs_more_preparation = False 

185 

186 def __str__(self) -> str: 

187 if self.req: 

188 s = str(self.req) 

189 if self.link: 

190 s += " from {}".format(redact_auth_from_url(self.link.url)) 

191 elif self.link: 

192 s = redact_auth_from_url(self.link.url) 

193 else: 

194 s = "<InstallRequirement>" 

195 if self.satisfied_by is not None: 

196 if self.satisfied_by.location is not None: 

197 location = display_path(self.satisfied_by.location) 

198 else: 

199 location = "<memory>" 

200 s += f" in {location}" 

201 if self.comes_from: 

202 if isinstance(self.comes_from, str): 

203 comes_from: Optional[str] = self.comes_from 

204 else: 

205 comes_from = self.comes_from.from_path() 

206 if comes_from: 

207 s += f" (from {comes_from})" 

208 return s 

209 

210 def __repr__(self) -> str: 

211 return "<{} object: {} editable={!r}>".format( 

212 self.__class__.__name__, str(self), self.editable 

213 ) 

214 

215 def format_debug(self) -> str: 

216 """An un-tested helper for getting state, for debugging.""" 

217 attributes = vars(self) 

218 names = sorted(attributes) 

219 

220 state = ("{}={!r}".format(attr, attributes[attr]) for attr in sorted(names)) 

221 return "<{name} object: {{{state}}}>".format( 

222 name=self.__class__.__name__, 

223 state=", ".join(state), 

224 ) 

225 

226 # Things that are valid for all kinds of requirements? 

227 @property 

228 def name(self) -> Optional[str]: 

229 if self.req is None: 

230 return None 

231 return self.req.name 

232 

233 @functools.lru_cache() # use cached_property in python 3.8+ 

234 def supports_pyproject_editable(self) -> bool: 

235 if not self.use_pep517: 

236 return False 

237 assert self.pep517_backend 

238 with self.build_env: 

239 runner = runner_with_spinner_message( 

240 "Checking if build backend supports build_editable" 

241 ) 

242 with self.pep517_backend.subprocess_runner(runner): 

243 return "build_editable" in self.pep517_backend._supported_features() 

244 

245 @property 

246 def specifier(self) -> SpecifierSet: 

247 return self.req.specifier 

248 

249 @property 

250 def is_direct(self) -> bool: 

251 """Whether this requirement was specified as a direct URL.""" 

252 return self.original_link is not None 

253 

254 @property 

255 def is_pinned(self) -> bool: 

256 """Return whether I am pinned to an exact version. 

257 

258 For example, some-package==1.2 is pinned; some-package>1.2 is not. 

259 """ 

260 specifiers = self.specifier 

261 return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="} 

262 
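# Illustrative sketch (editor's addition, not part of pip): the same pinned-ness
# check applied directly to a requirement string, assuming pip's vendored
# packaging module is importable.
from pip._vendor.packaging.requirements import Requirement as _Req

def _is_pinned(req_str: str) -> bool:
    specifiers = _Req(req_str).specifier
    return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="}

# _is_pinned("some-package==1.2")  -> True  (exact pin)
# _is_pinned("some-package>1.2")   -> False (a range, not a pin)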

263 def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool: 

264 if not extras_requested: 

265 # Provide an extra to safely evaluate the markers 

266 # without matching any extra 

267 extras_requested = ("",) 

268 if self.markers is not None: 

269 return any( 

270 self.markers.evaluate({"extra": extra}) for extra in extras_requested 

271 ) 

272 else: 

273 return True 

274 
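# Illustrative sketch (editor's addition): evaluating an environment marker the
# way match_markers does, injecting each requested extra into the marker context.
from pip._vendor.packaging.markers import Marker

_marker = Marker('extra == "test"')
print(any(_marker.evaluate({"extra": e}) for e in ("",)))       # False: no extra requested
print(any(_marker.evaluate({"extra": e}) for e in ("test",)))   # True: "test" was requested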

275 @property 

276 def has_hash_options(self) -> bool: 

277 """Return whether any known-good hashes are specified as options. 

278 

279 These activate --require-hashes mode; hashes specified as part of a 

280 URL do not. 

281 

282 """ 

283 return bool(self.hash_options) 

284 

285 def hashes(self, trust_internet: bool = True) -> Hashes: 

286 """Return a hash-comparer that considers my option- and URL-based 

287 hashes to be known-good. 

288 

289 Hashes in URLs--ones embedded in the requirements file, not ones 

290 downloaded from an index server--are almost peers with ones from 

291 flags. They satisfy --require-hashes (whether it was implicitly or 

292 explicitly activated) but do not activate it. md5 and sha224 are not 

293 allowed in flags, which should nudge people toward good algos. We 

294 always OR all hashes together, even ones from URLs. 

295 

296 :param trust_internet: Whether to trust URL-based (#md5=...) hashes 

297 downloaded from the internet, as by populate_link() 

298 

299 """ 

300 good_hashes = self.hash_options.copy() 

301 if trust_internet: 

302 link = self.link 

303 elif self.is_direct and self.user_supplied: 

304 link = self.original_link 

305 else: 

306 link = None 

307 if link and link.hash: 

308 good_hashes.setdefault(link.hash_name, []).append(link.hash) 

309 return Hashes(good_hashes) 

310 
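# Illustrative sketch (editor's addition): how option-supplied hashes and a URL
# fragment hash end up in the mapping handed to Hashes(). The digest values are
# placeholders, not real hashes.
from pip._internal.utils.hashes import Hashes

good_hashes = {"sha256": ["<digest from --hash=sha256:...>"]}
# A link such as .../pkg-1.0.tar.gz#sha256=<digest> contributes its fragment too:
good_hashes.setdefault("sha256", []).append("<digest from URL fragment>")
hashes = Hashes(good_hashes)   # an artifact matching any listed digest is accepted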

311 def from_path(self) -> Optional[str]: 

312 """Format a nice indicator to show where this "comes from" """ 

313 if self.req is None: 

314 return None 

315 s = str(self.req) 

316 if self.comes_from: 

317 if isinstance(self.comes_from, str): 

318 comes_from = self.comes_from 

319 else: 

320 comes_from = self.comes_from.from_path() 

321 if comes_from: 

322 s += "->" + comes_from 

323 return s 

324 

325 def ensure_build_location( 

326 self, build_dir: str, autodelete: bool, parallel_builds: bool 

327 ) -> str: 

328 assert build_dir is not None 

329 if self._temp_build_dir is not None: 

330 assert self._temp_build_dir.path 

331 return self._temp_build_dir.path 

332 if self.req is None: 

333 # Some systems have /tmp as a symlink which confuses custom 

334 # builds (such as numpy). Thus, we ensure that the real path 

335 # is returned. 

336 self._temp_build_dir = TempDirectory( 

337 kind=tempdir_kinds.REQ_BUILD, globally_managed=True 

338 ) 

339 

340 return self._temp_build_dir.path 

341 

342 # This is the only remaining place where we manually determine the path 

343 # for the temporary directory. It is only needed for editables where 

344 # it is the value of the --src option. 

345 

346 # When parallel builds are enabled, add a UUID to the build directory 

347 # name so multiple builds do not interfere with each other. 

348 dir_name: str = canonicalize_name(self.name) 

349 if parallel_builds: 

350 dir_name = f"{dir_name}_{uuid.uuid4().hex}" 

351 

352 # FIXME: Is there a better place to create the build_dir? (hg and bzr 

353 # need this) 

354 if not os.path.exists(build_dir): 

355 logger.debug("Creating directory %s", build_dir) 

356 os.makedirs(build_dir) 

357 actual_build_dir = os.path.join(build_dir, dir_name) 

358 # `None` indicates that we respect the globally-configured deletion 

359 # settings, which is what we actually want when auto-deleting. 

360 delete_arg = None if autodelete else False 

361 return TempDirectory( 

362 path=actual_build_dir, 

363 delete=delete_arg, 

364 kind=tempdir_kinds.REQ_BUILD, 

365 globally_managed=True, 

366 ).path 

367 
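# Illustrative sketch (editor's addition): the build-directory naming used above
# when parallel builds are enabled, so concurrent builds of the same project do
# not collide.
import uuid
from pip._vendor.packaging.utils import canonicalize_name

dir_name = canonicalize_name("Some_Package")      # "some-package"
dir_name = f"{dir_name}_{uuid.uuid4().hex}"       # e.g. "some-package_3f2a9c..."
print(dir_name)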

368 def _set_requirement(self) -> None: 

369 """Set requirement after generating metadata.""" 

370 assert self.req is None 

371 assert self.metadata is not None 

372 assert self.source_dir is not None 

373 

374 # Construct a Requirement object from the generated metadata 

375 if isinstance(parse_version(self.metadata["Version"]), Version): 

376 op = "==" 

377 else: 

378 op = "===" 

379 

380 self.req = Requirement( 

381 "".join( 

382 [ 

383 self.metadata["Name"], 

384 op, 

385 self.metadata["Version"], 

386 ] 

387 ) 

388 ) 

389 
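# Illustrative sketch (editor's addition): why _set_requirement chooses "==" vs
# "===". A PEP 440 version parses to Version, so "==" is used; with the vendored
# packaging, a non-PEP 440 ("legacy") version string would not, and arbitrary
# equality "===" is used instead.
from pip._vendor.packaging.version import Version, parse as parse_version

version = "1.2.3"
op = "==" if isinstance(parse_version(version), Version) else "==="
print(f"example-pkg{op}{version}")   # example-pkg==1.2.3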

390 def warn_on_mismatching_name(self) -> None: 

391 metadata_name = canonicalize_name(self.metadata["Name"]) 

392 if canonicalize_name(self.req.name) == metadata_name: 

393 # Everything is fine. 

394 return 

395 

396 # If we're here, there's a mismatch. Log a warning about it. 

397 logger.warning( 

398 "Generating metadata for package %s " 

399 "produced metadata for project name %s. Fix your " 

400 "#egg=%s fragments.", 

401 self.name, 

402 metadata_name, 

403 self.name, 

404 ) 

405 self.req = Requirement(metadata_name) 

406 

407 def check_if_exists(self, use_user_site: bool) -> None: 

408 """Find an installed distribution that satisfies or conflicts 

409 with this requirement, and set self.satisfied_by or 

410 self.should_reinstall appropriately. 

411 """ 

412 if self.req is None: 

413 return 

414 existing_dist = get_default_environment().get_distribution(self.req.name) 

415 if not existing_dist: 

416 return 

417 

418 version_compatible = self.req.specifier.contains( 

419 existing_dist.version, 

420 prereleases=True, 

421 ) 

422 if not version_compatible: 

423 self.satisfied_by = None 

424 if use_user_site: 

425 if existing_dist.in_usersite: 

426 self.should_reinstall = True 

427 elif running_under_virtualenv() and existing_dist.in_site_packages: 

428 raise InstallationError( 

429 f"Will not install to the user site because it will " 

430 f"lack sys.path precedence to {existing_dist.raw_name} " 

431 f"in {existing_dist.location}" 

432 ) 

433 else: 

434 self.should_reinstall = True 

435 else: 

436 if self.editable: 

437 self.should_reinstall = True 

438 # when installing editables, nothing pre-existing should ever 

439 # satisfy 

440 self.satisfied_by = None 

441 else: 

442 self.satisfied_by = existing_dist 

443 

444 # Things valid for wheels 

445 @property 

446 def is_wheel(self) -> bool: 

447 if not self.link: 

448 return False 

449 return self.link.is_wheel 

450 

451 @property 

452 def is_wheel_from_cache(self) -> bool: 

453 # When True, it means that this InstallRequirement is a local wheel file in the 

454 # cache of locally built wheels. 

455 return self.cached_wheel_source_link is not None 

456 

457 # Things valid for sdists 

458 @property 

459 def unpacked_source_directory(self) -> str: 

460 return os.path.join( 

461 self.source_dir, self.link and self.link.subdirectory_fragment or "" 

462 ) 

463 

464 @property 

465 def setup_py_path(self) -> str: 

466 assert self.source_dir, f"No source dir for {self}" 

467 setup_py = os.path.join(self.unpacked_source_directory, "setup.py") 

468 

469 return setup_py 

470 

471 @property 

472 def setup_cfg_path(self) -> str: 

473 assert self.source_dir, f"No source dir for {self}" 

474 setup_cfg = os.path.join(self.unpacked_source_directory, "setup.cfg") 

475 

476 return setup_cfg 

477 

478 @property 

479 def pyproject_toml_path(self) -> str: 

480 assert self.source_dir, f"No source dir for {self}" 

481 return make_pyproject_path(self.unpacked_source_directory) 

482 

483 def load_pyproject_toml(self) -> None: 

484 """Load the pyproject.toml file. 

485 

486 After calling this routine, all of the attributes related to PEP 517 

487 processing for this requirement have been set. In particular, the 

488 use_pep517 attribute can be used to determine whether we should 

489 follow the PEP 517 or legacy (setup.py) code path. 

490 """ 

491 pyproject_toml_data = load_pyproject_toml( 

492 self.use_pep517, self.pyproject_toml_path, self.setup_py_path, str(self) 

493 ) 

494 

495 if pyproject_toml_data is None: 

496 if self.config_settings: 

497 deprecated( 

498 reason=f"Config settings are ignored for project {self}.", 

499 replacement=( 

500 "to use --use-pep517 or add a " 

501 "pyproject.toml file to the project" 

502 ), 

503 gone_in="23.3", 

504 ) 

505 self.use_pep517 = False 

506 return 

507 

508 self.use_pep517 = True 

509 requires, backend, check, backend_path = pyproject_toml_data 

510 self.requirements_to_check = check 

511 self.pyproject_requires = requires 

512 self.pep517_backend = ConfiguredBuildBackendHookCaller( 

513 self, 

514 self.unpacked_source_directory, 

515 backend, 

516 backend_path=backend_path, 

517 ) 

518 

519 def isolated_editable_sanity_check(self) -> None: 

520 """Check that an editable requirement if valid for use with PEP 517/518. 

521 

522 This verifies that an editable that has a pyproject.toml either supports PEP 660 

523 or has a setup.py or a setup.cfg. 

524 """ 

525 if ( 

526 self.editable 

527 and self.use_pep517 

528 and not self.supports_pyproject_editable() 

529 and not os.path.isfile(self.setup_py_path) 

530 and not os.path.isfile(self.setup_cfg_path) 

531 ): 

532 raise InstallationError( 

533 f"Project {self} has a 'pyproject.toml' and its build " 

534 f"backend is missing the 'build_editable' hook. Since it does not " 

535 f"have a 'setup.py' nor a 'setup.cfg', " 

536 f"it cannot be installed in editable mode. " 

537 f"Consider using a build backend that supports PEP 660." 

538 ) 

539 

540 def prepare_metadata(self) -> None: 

541 """Ensure that project metadata is available. 

542 

543 Under PEP 517 and PEP 660, call the backend hook to prepare the metadata. 

544 Under legacy processing, call setup.py egg-info. 

545 """ 

546 assert self.source_dir 

547 details = self.name or f"from {self.link}" 

548 

549 if self.use_pep517: 

550 assert self.pep517_backend is not None 

551 if ( 

552 self.editable 

553 and self.permit_editable_wheels 

554 and self.supports_pyproject_editable() 

555 ): 

556 self.metadata_directory = generate_editable_metadata( 

557 build_env=self.build_env, 

558 backend=self.pep517_backend, 

559 details=details, 

560 ) 

561 else: 

562 self.metadata_directory = generate_metadata( 

563 build_env=self.build_env, 

564 backend=self.pep517_backend, 

565 details=details, 

566 ) 

567 else: 

568 self.metadata_directory = generate_metadata_legacy( 

569 build_env=self.build_env, 

570 setup_py_path=self.setup_py_path, 

571 source_dir=self.unpacked_source_directory, 

572 isolated=self.isolated, 

573 details=details, 

574 ) 

575 

576 # Act on the newly generated metadata, based on the name and version. 

577 if not self.name: 

578 self._set_requirement() 

579 else: 

580 self.warn_on_mismatching_name() 

581 

582 self.assert_source_matches_version() 

583 

584 @property 

585 def metadata(self) -> Any: 

586 if not hasattr(self, "_metadata"): 

587 self._metadata = self.get_dist().metadata 

588 

589 return self._metadata 

590 

591 def get_dist(self) -> BaseDistribution: 

592 if self.metadata_directory: 

593 return get_directory_distribution(self.metadata_directory) 

594 elif self.local_file_path and self.is_wheel: 

595 return get_wheel_distribution( 

596 FilesystemWheel(self.local_file_path), canonicalize_name(self.name) 

597 ) 

598 raise AssertionError( 

599 f"InstallRequirement {self} has no metadata directory and no wheel: " 

600 f"can't make a distribution." 

601 ) 

602 

603 def assert_source_matches_version(self) -> None: 

604 assert self.source_dir 

605 version = self.metadata["version"] 

606 if self.req.specifier and version not in self.req.specifier: 

607 logger.warning( 

608 "Requested %s, but installing version %s", 

609 self, 

610 version, 

611 ) 

612 else: 

613 logger.debug( 

614 "Source in %s has version %s, which satisfies requirement %s", 

615 display_path(self.source_dir), 

616 version, 

617 self, 

618 ) 

619 

620 # For both source distributions and editables 

621 def ensure_has_source_dir( 

622 self, 

623 parent_dir: str, 

624 autodelete: bool = False, 

625 parallel_builds: bool = False, 

626 ) -> None: 

627 """Ensure that a source_dir is set. 

628 

629 This will create a temporary build dir if the name of the requirement 

630 isn't known yet. 

631 

632 :param parent_dir: The ideal pip parent_dir for the source_dir. 

633 Generally src_dir for editables and build_dir for sdists. 

634 :return: None. Sets self.source_dir as a side effect. 

635 """ 

636 if self.source_dir is None: 

637 self.source_dir = self.ensure_build_location( 

638 parent_dir, 

639 autodelete=autodelete, 

640 parallel_builds=parallel_builds, 

641 ) 

642 

643 # For editable installations 

644 def update_editable(self) -> None: 

645 if not self.link: 

646 logger.debug( 

647 "Cannot update repository at %s; repository location is unknown", 

648 self.source_dir, 

649 ) 

650 return 

651 assert self.editable 

652 assert self.source_dir 

653 if self.link.scheme == "file": 

654 # Static paths don't get updated 

655 return 

656 vcs_backend = vcs.get_backend_for_scheme(self.link.scheme) 

657 # Editable requirements are validated in Requirement constructors. 

658 # So here, if it's neither a path nor a valid VCS URL, it's a bug. 

659 assert vcs_backend, f"Unsupported VCS URL {self.link.url}" 

660 hidden_url = hide_url(self.link.url) 

661 vcs_backend.obtain(self.source_dir, url=hidden_url, verbosity=0) 

662 

663 # Top-level Actions 

664 def uninstall( 

665 self, auto_confirm: bool = False, verbose: bool = False 

666 ) -> Optional[UninstallPathSet]: 

667 """ 

668 Uninstall the distribution currently satisfying this requirement. 

669 

670 Prompts before removing or modifying files unless 

671 ``auto_confirm`` is True. 

672 

673 Refuses to delete or modify files outside of ``sys.prefix`` - 

674 thus uninstallation within a virtual environment can only 

675 modify that virtual environment, even if the virtualenv is 

676 linked to global site-packages. 

677 

678 """ 

679 assert self.req 

680 dist = get_default_environment().get_distribution(self.req.name) 

681 if not dist: 

682 logger.warning("Skipping %s as it is not installed.", self.name) 

683 return None 

684 logger.info("Found existing installation: %s", dist) 

685 

686 uninstalled_pathset = UninstallPathSet.from_dist(dist) 

687 uninstalled_pathset.remove(auto_confirm, verbose) 

688 return uninstalled_pathset 

689 

690 def _get_archive_name(self, path: str, parentdir: str, rootdir: str) -> str: 

691 def _clean_zip_name(name: str, prefix: str) -> str: 

692 assert name.startswith( 

693 prefix + os.path.sep 

694 ), f"name {name!r} doesn't start with prefix {prefix!r}" 

695 name = name[len(prefix) + 1 :] 

696 name = name.replace(os.path.sep, "/") 

697 return name 

698 

699 path = os.path.join(parentdir, path) 

700 name = _clean_zip_name(path, rootdir) 

701 return self.name + "/" + name 

702 
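# Illustrative sketch (editor's addition): how an archive member name is derived
# from a filesystem path rooted at the unpacked source directory. The paths here
# are made up for the example.
import os

rootdir = os.path.normcase(os.path.abspath("pkg-src"))
path = os.path.join(rootdir, "pkg", "module.py")
member = path[len(rootdir) + 1 :].replace(os.path.sep, "/")
print("example-pkg/" + member)   # example-pkg/pkg/module.py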

703 def archive(self, build_dir: Optional[str]) -> None: 

704 """Saves archive to provided build_dir. 

705 

706 Used for saving downloaded VCS requirements as part of `pip download`. 

707 """ 

708 assert self.source_dir 

709 if build_dir is None: 

710 return 

711 

712 create_archive = True 

713 archive_name = "{}-{}.zip".format(self.name, self.metadata["version"]) 

714 archive_path = os.path.join(build_dir, archive_name) 

715 

716 if os.path.exists(archive_path): 

717 response = ask_path_exists( 

718 "The file {} exists. (i)gnore, (w)ipe, " 

719 "(b)ackup, (a)bort ".format(display_path(archive_path)), 

720 ("i", "w", "b", "a"), 

721 ) 

722 if response == "i": 

723 create_archive = False 

724 elif response == "w": 

725 logger.warning("Deleting %s", display_path(archive_path)) 

726 os.remove(archive_path) 

727 elif response == "b": 

728 dest_file = backup_dir(archive_path) 

729 logger.warning( 

730 "Backing up %s to %s", 

731 display_path(archive_path), 

732 display_path(dest_file), 

733 ) 

734 shutil.move(archive_path, dest_file) 

735 elif response == "a": 

736 sys.exit(-1) 

737 

738 if not create_archive: 

739 return 

740 

741 zip_output = zipfile.ZipFile( 

742 archive_path, 

743 "w", 

744 zipfile.ZIP_DEFLATED, 

745 allowZip64=True, 

746 ) 

747 with zip_output: 

748 dir = os.path.normcase(os.path.abspath(self.unpacked_source_directory)) 

749 for dirpath, dirnames, filenames in os.walk(dir): 

750 for dirname in dirnames: 

751 dir_arcname = self._get_archive_name( 

752 dirname, 

753 parentdir=dirpath, 

754 rootdir=dir, 

755 ) 

756 zipdir = zipfile.ZipInfo(dir_arcname + "/") 

757 zipdir.external_attr = 0x1ED << 16 # 0o755 

758 zip_output.writestr(zipdir, "") 

759 for filename in filenames: 

760 file_arcname = self._get_archive_name( 

761 filename, 

762 parentdir=dirpath, 

763 rootdir=dir, 

764 ) 

765 filename = os.path.join(dirpath, filename) 

766 zip_output.write(filename, file_arcname) 

767 

768 logger.info("Saved %s", display_path(archive_path)) 

769 

770 def install( 

771 self, 

772 global_options: Optional[Sequence[str]] = None, 

773 root: Optional[str] = None, 

774 home: Optional[str] = None, 

775 prefix: Optional[str] = None, 

776 warn_script_location: bool = True, 

777 use_user_site: bool = False, 

778 pycompile: bool = True, 

779 ) -> None: 

780 scheme = get_scheme( 

781 self.name, 

782 user=use_user_site, 

783 home=home, 

784 root=root, 

785 isolated=self.isolated, 

786 prefix=prefix, 

787 ) 

788 

789 if self.editable and not self.is_wheel: 

790 install_editable_legacy( 

791 global_options=global_options if global_options is not None else [], 

792 prefix=prefix, 

793 home=home, 

794 use_user_site=use_user_site, 

795 name=self.name, 

796 setup_py_path=self.setup_py_path, 

797 isolated=self.isolated, 

798 build_env=self.build_env, 

799 unpacked_source_directory=self.unpacked_source_directory, 

800 ) 

801 self.install_succeeded = True 

802 return 

803 

804 assert self.is_wheel 

805 assert self.local_file_path 

806 

807 install_wheel( 

808 self.name, 

809 self.local_file_path, 

810 scheme=scheme, 

811 req_description=str(self.req), 

812 pycompile=pycompile, 

813 warn_script_location=warn_script_location, 

814 direct_url=self.download_info if self.is_direct else None, 

815 requested=self.user_supplied, 

816 ) 

817 self.install_succeeded = True 

818 

819 

820def check_invalid_constraint_type(req: InstallRequirement) -> str: 

821 # Check for unsupported forms 

822 problem = "" 

823 if not req.name: 

824 problem = "Unnamed requirements are not allowed as constraints" 

825 elif req.editable: 

826 problem = "Editable requirements are not allowed as constraints" 

827 elif req.extras: 

828 problem = "Constraints cannot have extras" 

829 

830 if problem: 

831 deprecated( 

832 reason=( 

833 "Constraints are only allowed to take the form of a package " 

834 "name and a version specifier. Other forms were originally " 

835 "permitted as an accident of the implementation, but were " 

836 "undocumented. The new implementation of the resolver no " 

837 "longer supports these forms." 

838 ), 

839 replacement="replacing the constraint with a requirement", 

840 # No plan yet for when the new resolver becomes default 

841 gone_in=None, 

842 issue=8210, 

843 ) 

844 

845 return problem 

846 

847 

848def _has_option(options: Values, reqs: List[InstallRequirement], option: str) -> bool: 

849 if getattr(options, option, None): 

850 return True 

851 for req in reqs: 

852 if getattr(req, option, None): 

853 return True 

854 return False 

855 
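# Illustrative sketch (editor's addition): the attribute lookup _has_option
# performs, shown with a bare optparse Values object standing in for pip's
# parsed options.
from optparse import Values

opts = Values({"build_options": ["--plat-name=win32"], "global_options": None})
print(bool(getattr(opts, "build_options", None)))   # True -> legacy path triggers
print(bool(getattr(opts, "global_options", None)))  # False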

856 

857def check_legacy_setup_py_options( 

858 options: Values, 

859 reqs: List[InstallRequirement], 

860) -> None: 

861 has_build_options = _has_option(options, reqs, "build_options") 

862 has_global_options = _has_option(options, reqs, "global_options") 

863 if has_build_options or has_global_options: 

864 deprecated( 

865 reason="--build-option and --global-option are deprecated.", 

866 issue=11859, 

867 replacement="to use --config-settings", 

868 gone_in="23.3", 

869 ) 

870 logger.warning( 

871 "Implying --no-binary=:all: due to the presence of " 

872 "--build-option / --global-option. " 

873 ) 

874 options.format_control.disallow_binaries()