Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/pip/_internal/req/req_install.py: 24%

Shortcuts on this page

r m x   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

391 statements  

1from __future__ import annotations 

2 

3import functools 

4import logging 

5import os 

6import shutil 

7import sys 

8import uuid 

9import zipfile 

10from collections.abc import Collection, Iterable 

11from optparse import Values 

12from pathlib import Path 

13from typing import Any 

14 

15from pip._vendor.packaging.markers import Marker 

16from pip._vendor.packaging.requirements import Requirement 

17from pip._vendor.packaging.specifiers import SpecifierSet 

18from pip._vendor.packaging.utils import canonicalize_name 

19from pip._vendor.packaging.version import Version 

20from pip._vendor.packaging.version import parse as parse_version 

21from pip._vendor.pyproject_hooks import BuildBackendHookCaller 

22 

23from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment 

24from pip._internal.exceptions import InstallationError, PreviousBuildDirError 

25from pip._internal.locations import get_scheme 

26from pip._internal.metadata import ( 

27 BaseDistribution, 

28 get_default_environment, 

29 get_directory_distribution, 

30 get_wheel_distribution, 

31) 

32from pip._internal.metadata.base import FilesystemWheel 

33from pip._internal.models.direct_url import DirectUrl 

34from pip._internal.models.link import Link 

35from pip._internal.operations.build.metadata import generate_metadata 

36from pip._internal.operations.build.metadata_editable import generate_editable_metadata 

37from pip._internal.operations.install.wheel import install_wheel 

38from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path 

39from pip._internal.req.req_uninstall import UninstallPathSet 

40from pip._internal.utils.deprecation import deprecated 

41from pip._internal.utils.hashes import Hashes 

42from pip._internal.utils.misc import ( 

43 ConfiguredBuildBackendHookCaller, 

44 ask_path_exists, 

45 backup_dir, 

46 display_path, 

47 hide_url, 

48 is_installable_dir, 

49 redact_auth_from_requirement, 

50 redact_auth_from_url, 

51) 

52from pip._internal.utils.packaging import get_requirement 

53from pip._internal.utils.subprocess import runner_with_spinner_message 

54from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds 

55from pip._internal.utils.unpacking import unpack_file 

56from pip._internal.utils.virtualenv import running_under_virtualenv 

57from pip._internal.vcs import vcs 

58 

59logger = logging.getLogger(__name__) 

60 

61 

class InstallRequirement:
    """
    Represents something that may be installed later on, may have information
    about where to fetch the relevant requirement and also contains logic for
    installing the said requirement.
    """

    def __init__(
        self,
        req: Requirement | None,
        comes_from: str | InstallRequirement | None,
        editable: bool = False,
        link: Link | None = None,
        markers: Marker | None = None,
        isolated: bool = False,
        *,
        hash_options: dict[str, list[str]] | None = None,
        config_settings: dict[str, str | list[str]] | None = None,
        constraint: bool = False,
        extras: Collection[str] = (),
        user_supplied: bool = False,
        permit_editable_wheels: bool = False,
        locked_link: Link | None = None,
        locked_version: Version | None = None,
    ) -> None:
        # req may be None for unnamed requirements (e.g. a bare local
        # directory); it is filled in later, once metadata has been
        # generated (see _set_requirement).
        assert req is None or isinstance(req, Requirement), req
        self.req = req
        # Where this requirement came from: a spec/path string, or the
        # parent InstallRequirement that depends on this one.
        self.comes_from = comes_from
        self.constraint = constraint
        self.editable = editable
        self.permit_editable_wheels = permit_editable_wheels

        # source_dir is the local directory where the linked requirement is
        # located, or unpacked. In case unpacking is needed, creating and
        # populating source_dir is done by the RequirementPreparer. Note this
        # is not necessarily the directory where pyproject.toml or setup.py is
        # located - that one is obtained via unpacked_source_directory.
        self.source_dir: str | None = None
        if self.editable:
            assert link
            if link.is_file:
                self.source_dir = os.path.normpath(os.path.abspath(link.file_path))

        # original_link is the direct URL that was provided by the user for the
        # requirement, either directly or via a constraints file.
        if link is None and req and req.url:
            # PEP 508 URL requirement
            link = Link(req.url)
        self.link = self.original_link = link

        # locked_link is the link from the lock file that must be used.
        # A locked link InstallRequirement behaves similarly as a regular requirement
        # that would be searched in indexes, except its artifact URL is known
        # in advance. Notably, and contrarily to direct URL requirements and direct URL
        # constraints, they do not cause the recording of direct_url.json.
        self.locked_link = locked_link
        self.locked_version = locked_version

        # When this InstallRequirement is a wheel obtained from the cache of locally
        # built wheels, this is the source link corresponding to the cache entry, which
        # was used to download and build the cached wheel.
        self.cached_wheel_source_link: Link | None = None

        # Information about the location of the artifact that was downloaded . This
        # property is guaranteed to be set in resolver results.
        self.download_info: DirectUrl | None = None

        # Path to any downloaded or already-existing package.
        self.local_file_path: str | None = None
        if self.link and self.link.is_file:
            self.local_file_path = self.link.file_path

        # Explicitly passed extras win over the ones parsed from req.
        if extras:
            self.extras = extras
        elif req:
            self.extras = req.extras
        else:
            self.extras = set()
        if markers is None and req:
            markers = req.marker
        self.markers = markers

        # This holds the Distribution object if this requirement is already installed.
        self.satisfied_by: BaseDistribution | None = None
        # Whether the installation process should try to uninstall an existing
        # distribution before installing this requirement.
        self.should_reinstall = False
        # Temporary build location
        self._temp_build_dir: TempDirectory | None = None
        # Set to True after successful installation
        self.install_succeeded: bool | None = None
        # Supplied options
        self.hash_options = hash_options if hash_options else {}
        self.config_settings = config_settings
        # Set to True after successful preparation of this requirement
        self.prepared = False
        # User supplied requirement are explicitly requested for installation
        # by the user via CLI arguments or requirements files, as opposed to,
        # e.g. dependencies, extras or constraints.
        self.user_supplied = user_supplied

        self.isolated = isolated
        self.build_env: BuildEnvironment = NoOpBuildEnvironment()

        # For PEP 517, the directory where we request the project metadata
        # gets stored. We need this to pass to build_wheel, so the backend
        # can ensure that the wheel matches the metadata (see the PEP for
        # details).
        self.metadata_directory: str | None = None

        # The cached metadata distribution that this requirement represents.
        # See get_dist / set_dist.
        self._distribution: BaseDistribution | None = None

        # The static build requirements (from pyproject.toml)
        self.pyproject_requires: list[str] | None = None

        # Build requirements that we will check are available
        self.requirements_to_check: list[str] = []

        # The PEP 517 backend we should use to build the project
        self.pep517_backend: BuildBackendHookCaller | None = None

        # This requirement needs more preparation before it can be built
        self.needs_more_preparation = False

        # This requirement needs to be unpacked before it can be installed.
        self._archive_source: Path | None = None

190 

191 def __str__(self) -> str: 

192 if self.req: 

193 s = redact_auth_from_requirement(self.req) 

194 if self.link: 

195 s += f" from {redact_auth_from_url(self.link.url)}" 

196 elif self.link: 

197 s = redact_auth_from_url(self.link.url) 

198 else: 

199 s = "<InstallRequirement>" 

200 if self.satisfied_by is not None: 

201 if self.satisfied_by.location is not None: 

202 location = display_path(self.satisfied_by.location) 

203 else: 

204 location = "<memory>" 

205 s += f" in {location}" 

206 if self.comes_from: 

207 if isinstance(self.comes_from, str): 

208 comes_from: str | None = self.comes_from 

209 else: 

210 comes_from = self.comes_from.from_path() 

211 if comes_from: 

212 s += f" (from {comes_from})" 

213 return s 

214 

215 def __repr__(self) -> str: 

216 return ( 

217 f"<{self.__class__.__name__} object: " 

218 f"{str(self)} editable={self.editable!r}>" 

219 ) 

220 

221 def format_debug(self) -> str: 

222 """An un-tested helper for getting state, for debugging.""" 

223 attributes = vars(self) 

224 names = sorted(attributes) 

225 

226 state = (f"{attr}={attributes[attr]!r}" for attr in sorted(names)) 

227 return "<{name} object: {{{state}}}>".format( 

228 name=self.__class__.__name__, 

229 state=", ".join(state), 

230 ) 

231 

232 # Things that are valid for all kinds of requirements? 

233 @property 

234 def name(self) -> str | None: 

235 if self.req is None: 

236 return None 

237 return self.req.name 

238 

    @functools.cached_property
    def supports_pyproject_editable(self) -> bool:
        """Whether the PEP 517 backend exposes the PEP 660 build_editable hook.

        Spawns the backend in a subprocess (inside the build environment) the
        first time this is accessed; the result is cached on the instance.
        """
        assert self.pep517_backend
        with self.build_env:
            runner = runner_with_spinner_message(
                "Checking if build backend supports build_editable"
            )
            with self.pep517_backend.subprocess_runner(runner):
                return "build_editable" in self.pep517_backend._supported_features()

248 

249 @property 

250 def specifier(self) -> SpecifierSet: 

251 assert self.req is not None 

252 return self.req.specifier 

253 

254 @property 

255 def is_direct(self) -> bool: 

256 """Whether this requirement was specified as a direct URL.""" 

257 return self.original_link is not None 

258 

259 @property 

260 def is_pinned(self) -> bool: 

261 """Return whether I am pinned to an exact version. 

262 

263 For example, some-package==1.2 is pinned; some-package>1.2 is not. 

264 """ 

265 assert self.req is not None 

266 specifiers = self.req.specifier 

267 return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="} 

268 

269 def match_markers(self, extras_requested: Iterable[str] | None = None) -> bool: 

270 if not extras_requested: 

271 # Provide an extra to safely evaluate the markers 

272 # without matching any extra 

273 extras_requested = ("",) 

274 if self.markers is not None: 

275 return any( 

276 self.markers.evaluate({"extra": extra}) for extra in extras_requested 

277 ) 

278 else: 

279 return True 

280 

281 @property 

282 def has_hash_options(self) -> bool: 

283 """Return whether any known-good hashes are specified as options. 

284 

285 These activate --require-hashes mode; hashes specified as part of a 

286 URL do not. 

287 

288 """ 

289 return bool(self.hash_options) 

290 

    def hashes(self, trust_internet: bool = True) -> Hashes:
        """Return a hash-comparer that considers my option- and URL-based
        hashes to be known-good.

        Hashes in URLs--ones embedded in the requirements file, not ones
        downloaded from an index server--are almost peers with ones from
        flags. They satisfy --require-hashes (whether it was implicitly or
        explicitly activated) but do not activate it. md5 and sha224 are not
        allowed in flags, which should nudge people toward good algos. We
        always OR all hashes together, even ones from URLs.

        :param trust_internet: Whether to trust URL-based (#md5=...) hashes
            downloaded from the internet, as by populate_link()

        """
        good_hashes = self.hash_options.copy()
        if trust_internet:
            link = self.link
        elif self.is_direct and self.user_supplied:
            # Even without trusting internet-derived hashes, a direct URL the
            # user supplied themselves (with its fragment hash) is accepted.
            link = self.original_link
        else:
            link = None
        if link and link.hash:
            assert link.hash_name is not None
            good_hashes.setdefault(link.hash_name, []).append(link.hash)
        return Hashes(good_hashes)

317 

318 def from_path(self) -> str | None: 

319 """Format a nice indicator to show where this "comes from" """ 

320 if self.req is None: 

321 return None 

322 s = str(self.req) 

323 if self.comes_from: 

324 comes_from: str | None 

325 if isinstance(self.comes_from, str): 

326 comes_from = self.comes_from 

327 else: 

328 comes_from = self.comes_from.from_path() 

329 if comes_from: 

330 s += "->" + comes_from 

331 return s 

332 

    def ensure_build_location(
        self, build_dir: str, autodelete: bool, parallel_builds: bool
    ) -> str:
        """Return (creating it if needed) the directory this requirement builds in.

        Unnamed requirements get a globally managed temporary directory;
        named requirements get a subdirectory of ``build_dir``.
        """
        assert build_dir is not None
        # Reuse the build directory if one was already created.
        if self._temp_build_dir is not None:
            assert self._temp_build_dir.path
            return self._temp_build_dir.path
        if self.req is None:
            # Some systems have /tmp as a symlink which confuses custom
            # builds (such as numpy). Thus, we ensure that the real path
            # is returned.
            self._temp_build_dir = TempDirectory(
                kind=tempdir_kinds.REQ_BUILD, globally_managed=True
            )

            return self._temp_build_dir.path

        # This is the only remaining place where we manually determine the path
        # for the temporary directory. It is only needed for editables where
        # it is the value of the --src option.

        # When parallel builds are enabled, add a UUID to the build directory
        # name so multiple builds do not interfere with each other.
        dir_name: str = canonicalize_name(self.req.name)
        if parallel_builds:
            dir_name = f"{dir_name}_{uuid.uuid4().hex}"

        # FIXME: Is there a better place to create the build_dir? (hg and bzr
        # need this)
        if not os.path.exists(build_dir):
            logger.debug("Creating directory %s", build_dir)
            os.makedirs(build_dir)
        actual_build_dir = os.path.join(build_dir, dir_name)
        # `None` indicates that we respect the globally-configured deletion
        # settings, which is what we actually want when auto-deleting.
        delete_arg = None if autodelete else False
        return TempDirectory(
            path=actual_build_dir,
            delete=delete_arg,
            kind=tempdir_kinds.REQ_BUILD,
            globally_managed=True,
        ).path

375 

    def _set_requirement(self) -> None:
        """Set requirement after generating metadata."""
        assert self.req is None
        assert self.metadata is not None
        assert self.source_dir is not None

        # Construct a Requirement object from the generated metadata.
        # Use "===" (arbitrary equality) when the version does not parse as a
        # standard Version, since "==" would not match it.
        if isinstance(parse_version(self.metadata["Version"]), Version):
            op = "=="
        else:
            op = "==="

        self.req = get_requirement(
            "".join(
                [
                    self.metadata["Name"],
                    op,
                    self.metadata["Version"],
                ]
            )
        )

397 

    def warn_on_mismatching_name(self) -> None:
        """Warn when generated metadata reports a different project name.

        On a mismatch the metadata name is trusted: self.req is replaced with
        a bare requirement on that name.
        """
        assert self.req is not None
        metadata_name = canonicalize_name(self.metadata["Name"])
        if canonicalize_name(self.req.name) == metadata_name:
            # Everything is fine.
            return

        # If we're here, there's a mismatch. Log a warning about it.
        logger.warning(
            "Generating metadata for package %s "
            "produced metadata for project name %s. Fix your "
            "#egg=%s fragments.",
            self.name,
            metadata_name,
            self.name,
        )
        self.req = get_requirement(metadata_name)

415 

    def check_if_exists(self, use_user_site: bool) -> None:
        """Find an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.should_reinstall appropriately.
        """
        if self.req is None:
            return
        existing_dist = get_default_environment().get_distribution(self.req.name)
        if not existing_dist:
            return

        # Pre-releases are deliberately allowed: an installed pre-release
        # matching the specifier counts as compatible.
        version_compatible = self.req.specifier.contains(
            existing_dist.version,
            prereleases=True,
        )
        if not version_compatible:
            self.satisfied_by = None
            if use_user_site:
                if existing_dist.in_usersite:
                    # The conflicting copy is in the user site, so it can be
                    # replaced by a user-site install.
                    self.should_reinstall = True
                elif running_under_virtualenv() and existing_dist.in_site_packages:
                    raise InstallationError(
                        f"Will not install to the user site because it will "
                        f"lack sys.path precedence to {existing_dist.raw_name} "
                        f"in {existing_dist.location}"
                    )
            else:
                self.should_reinstall = True
        else:
            if self.editable:
                self.should_reinstall = True
                # when installing editables, nothing pre-existing should ever
                # satisfy
                self.satisfied_by = None
            else:
                self.satisfied_by = existing_dist

452 

453 # Things valid for wheels 

454 @property 

455 def is_wheel(self) -> bool: 

456 if not self.link: 

457 return False 

458 return self.link.is_wheel 

459 

460 @property 

461 def is_wheel_from_cache(self) -> bool: 

462 # When True, it means that this InstallRequirement is a local wheel file in the 

463 # cache of locally built wheels. 

464 return self.cached_wheel_source_link is not None 

465 

466 # Things valid for sdists 

467 @property 

468 def unpacked_source_directory(self) -> str: 

469 assert self.source_dir, f"No source dir for {self}" 

470 return os.path.join( 

471 self.source_dir, self.link and self.link.subdirectory_fragment or "" 

472 ) 

473 

474 @property 

475 def setup_py_path(self) -> str: 

476 assert self.source_dir, f"No source dir for {self}" 

477 setup_py = os.path.join(self.unpacked_source_directory, "setup.py") 

478 

479 return setup_py 

480 

    @property
    def pyproject_toml_path(self) -> str:
        """Path of pyproject.toml inside the unpacked source tree."""
        assert self.source_dir, f"No source dir for {self}"
        return make_pyproject_path(self.unpacked_source_directory)

485 

    def load_pyproject_toml(self) -> None:
        """Load the pyproject.toml file.

        After calling this routine, all of the attributes related to PEP 517
        processing for this requirement have been set
        (requirements_to_check, pyproject_requires, pep517_backend).
        """
        pyproject_toml_data = load_pyproject_toml(
            self.pyproject_toml_path, self.setup_py_path, str(self)
        )
        assert pyproject_toml_data
        requires, backend, check, backend_path = pyproject_toml_data
        self.requirements_to_check = check
        self.pyproject_requires = requires
        # NOTE(review): the configured hook caller receives `self`, presumably
        # so this requirement's config_settings apply to hook calls — confirm
        # against ConfiguredBuildBackendHookCaller.
        self.pep517_backend = ConfiguredBuildBackendHookCaller(
            self,
            self.unpacked_source_directory,
            backend,
            backend_path=backend_path,
        )

505 

506 def editable_sanity_check(self) -> None: 

507 """Check that an editable requirement if valid for use with PEP 517/518. 

508 

509 This verifies that an editable has a build backend that supports PEP 660. 

510 """ 

511 if self.editable and not self.supports_pyproject_editable: 

512 raise InstallationError( 

513 f"Project {self} uses a build backend " 

514 f"that is missing the 'build_editable' hook, so " 

515 f"it cannot be installed in editable mode. " 

516 f"Consider using a build backend that supports PEP 660." 

517 ) 

518 

    def prepare_metadata(self) -> None:
        """Ensure that project metadata is available.

        Calls the PEP 517 backend hook (or the PEP 660 editable variant) to
        prepare the metadata, then reconciles self.req with what the
        metadata actually declares.
        """
        assert self.source_dir, f"No source dir for {self}"
        details = self.name or f"from {self.link}"

        assert self.pep517_backend is not None
        # Use the PEP 660 editable hook only when editable wheels are
        # permitted and the backend actually supports build_editable.
        if (
            self.editable
            and self.permit_editable_wheels
            and self.supports_pyproject_editable
        ):
            self.metadata_directory = generate_editable_metadata(
                build_env=self.build_env,
                backend=self.pep517_backend,
                details=details,
            )
        else:
            self.metadata_directory = generate_metadata(
                build_env=self.build_env,
                backend=self.pep517_backend,
                details=details,
            )

        # Act on the newly generated metadata, based on the name and version.
        if not self.name:
            self._set_requirement()
        else:
            self.warn_on_mismatching_name()

        self.assert_source_matches_version()

553 

554 @property 

555 def metadata(self) -> Any: 

556 if not hasattr(self, "_metadata"): 

557 self._metadata = self.get_dist().metadata 

558 

559 return self._metadata 

560 

561 def set_dist(self, distribution: BaseDistribution) -> None: 

562 self._distribution = distribution 

563 

    def get_dist(self) -> BaseDistribution:
        """Return a metadata distribution for this requirement.

        Resolution order: a distribution set via set_dist(), then a prepared
        metadata directory, then the metadata inside a local wheel file.
        Raises AssertionError when none of these sources is available.
        """
        if self._distribution is not None:
            return self._distribution
        elif self.metadata_directory:
            return get_directory_distribution(self.metadata_directory)
        elif self.local_file_path and self.is_wheel:
            assert self.req is not None
            return get_wheel_distribution(
                FilesystemWheel(self.local_file_path),
                canonicalize_name(self.req.name),
            )
        raise AssertionError(
            f"InstallRequirement {self} has no metadata directory and no wheel: "
            f"can't make a distribution."
        )

579 

    def assert_source_matches_version(self) -> None:
        """Log whether the unpacked source's version satisfies self.req.

        Despite the name, this never raises: a version mismatch is only
        logged as a warning.
        """
        assert self.source_dir, f"No source dir for {self}"
        version = self.metadata["version"]
        if self.req and self.req.specifier and version not in self.req.specifier:
            logger.warning(
                "Requested %s, but installing version %s",
                self,
                version,
            )
        else:
            logger.debug(
                "Source in %s has version %s, which satisfies requirement %s",
                display_path(self.source_dir),
                version,
                self,
            )

596 

597 # For both source distributions and editables 

598 def ensure_has_source_dir( 

599 self, 

600 parent_dir: str, 

601 autodelete: bool = False, 

602 parallel_builds: bool = False, 

603 ) -> None: 

604 """Ensure that a source_dir is set. 

605 

606 This will create a temporary build dir if the name of the requirement 

607 isn't known yet. 

608 

609 :param parent_dir: The ideal pip parent_dir for the source_dir. 

610 Generally src_dir for editables and build_dir for sdists. 

611 :return: self.source_dir 

612 """ 

613 if self.source_dir is None: 

614 self.source_dir = self.ensure_build_location( 

615 parent_dir, 

616 autodelete=autodelete, 

617 parallel_builds=parallel_builds, 

618 ) 

619 

620 def needs_unpacked_archive(self, archive_source: Path) -> None: 

621 assert self._archive_source is None 

622 self._archive_source = archive_source 

623 

    def ensure_pristine_source_checkout(self) -> None:
        """Ensure the source directory has not yet been built in."""
        assert self.source_dir is not None
        if self._archive_source is not None:
            # Re-extract the original archive over the source directory to
            # guarantee a clean tree.
            unpack_file(str(self._archive_source), self.source_dir)
        elif is_installable_dir(self.source_dir):
            # If a checkout exists, it's unwise to keep going.
            # version inconsistencies are logged later, but do not fail
            # the installation.
            raise PreviousBuildDirError(
                f"pip can't proceed with requirements '{self}' due to a "
                f"pre-existing build directory ({self.source_dir}). This is likely "
                "due to a previous installation that failed . pip is "
                "being responsible and not assuming it can delete this. "
                "Please delete it and try again."
            )

640 

641 # For editable installations 

    def update_editable(self) -> None:
        """Refresh an editable VCS checkout to the revision named by self.link."""
        if not self.link:
            logger.debug(
                "Cannot update repository at %s; repository location is unknown",
                self.source_dir,
            )
            return
        assert self.editable
        assert self.source_dir
        if self.link.scheme == "file":
            # Static paths don't get updated
            return
        vcs_backend = vcs.get_backend_for_scheme(self.link.scheme)
        # Editable requirements are validated in Requirement constructors.
        # So here, if it's neither a path nor a valid VCS URL, it's a bug.
        assert vcs_backend, f"Unsupported VCS URL {self.link.url}"
        # Hide credentials embedded in the URL from log output.
        hidden_url = hide_url(self.link.url)
        vcs_backend.obtain(self.source_dir, url=hidden_url, verbosity=0)

660 

661 # Top-level Actions 

    def uninstall(
        self, auto_confirm: bool = False, verbose: bool = False
    ) -> UninstallPathSet | None:
        """
        Uninstall the distribution currently satisfying this requirement.

        Prompts before removing or modifying files unless
        ``auto_confirm`` is True.

        Refuses to delete or modify files outside of ``sys.prefix`` -
        thus uninstallation within a virtual environment can only
        modify that virtual environment, even if the virtualenv is
        linked to global site-packages.

        Returns the removed UninstallPathSet, or None when the
        distribution was not installed to begin with.
        """
        assert self.req
        dist = get_default_environment().get_distribution(self.req.name)
        if not dist:
            logger.warning("Skipping %s as it is not installed.", self.name)
            return None
        logger.info("Found existing installation: %s", dist)

        uninstalled_pathset = UninstallPathSet.from_dist(dist)
        uninstalled_pathset.remove(auto_confirm, verbose)
        return uninstalled_pathset

687 

688 def _get_archive_name(self, path: str, parentdir: str, rootdir: str) -> str: 

689 def _clean_zip_name(name: str, prefix: str) -> str: 

690 assert name.startswith( 

691 prefix + os.path.sep 

692 ), f"name {name!r} doesn't start with prefix {prefix!r}" 

693 name = name[len(prefix) + 1 :] 

694 name = name.replace(os.path.sep, "/") 

695 return name 

696 

697 assert self.req is not None 

698 path = os.path.join(parentdir, path) 

699 name = _clean_zip_name(path, rootdir) 

700 return self.req.name + "/" + name 

701 

    def archive(self, build_dir: str | None) -> None:
        """Saves archive to provided build_dir.

        Used for saving downloaded VCS requirements as part of `pip download`.
        """
        assert self.source_dir
        if build_dir is None:
            return

        create_archive = True
        archive_name = "{}-{}.zip".format(self.name, self.metadata["version"])
        archive_path = os.path.join(build_dir, archive_name)

        # If the target zip already exists, ask the user how to proceed:
        # ignore, wipe, back up, or abort.
        if os.path.exists(archive_path):
            response = ask_path_exists(
                f"The file {display_path(archive_path)} exists. (i)gnore, (w)ipe, "
                "(b)ackup, (a)bort ",
                ("i", "w", "b", "a"),
            )
            if response == "i":
                create_archive = False
            elif response == "w":
                logger.warning("Deleting %s", display_path(archive_path))
                os.remove(archive_path)
            elif response == "b":
                dest_file = backup_dir(archive_path)
                logger.warning(
                    "Backing up %s to %s",
                    display_path(archive_path),
                    display_path(dest_file),
                )
                shutil.move(archive_path, dest_file)
            elif response == "a":
                sys.exit(-1)

        if not create_archive:
            return

        zip_output = zipfile.ZipFile(
            archive_path,
            "w",
            zipfile.ZIP_DEFLATED,
            allowZip64=True,
        )
        with zip_output:
            dir = os.path.normcase(os.path.abspath(self.unpacked_source_directory))
            for dirpath, dirnames, filenames in os.walk(dir):
                for dirname in dirnames:
                    dir_arcname = self._get_archive_name(
                        dirname,
                        parentdir=dirpath,
                        rootdir=dir,
                    )
                    # Write an explicit directory entry with 0o755 permissions
                    # (encoded in the upper bits of external_attr).
                    zipdir = zipfile.ZipInfo(dir_arcname + "/")
                    zipdir.external_attr = 0x1ED << 16  # 0o755
                    zip_output.writestr(zipdir, "")
                for filename in filenames:
                    file_arcname = self._get_archive_name(
                        filename,
                        parentdir=dirpath,
                        rootdir=dir,
                    )
                    filename = os.path.join(dirpath, filename)
                    zip_output.write(filename, file_arcname)

        logger.info("Saved %s", display_path(archive_path))

768 

    def install(
        self,
        root: str | None = None,
        home: str | None = None,
        prefix: str | None = None,
        warn_script_location: bool = True,
        use_user_site: bool = False,
        pycompile: bool = True,
    ) -> None:
        """Install this requirement's wheel into the computed scheme.

        By the time this runs the requirement must have been prepared into a
        local wheel file (self.local_file_path); sdists are built into
        wheels earlier in the pipeline.
        """
        assert self.req is not None
        scheme = get_scheme(
            self.req.name,
            user=use_user_site,
            home=home,
            root=root,
            isolated=self.isolated,
            prefix=prefix,
        )

        assert self.is_wheel
        assert self.local_file_path

        install_wheel(
            self.req.name,
            self.local_file_path,
            scheme=scheme,
            req_description=str(self.req),
            pycompile=pycompile,
            warn_script_location=warn_script_location,
            # Only direct URL requirements get a direct_url.json record.
            direct_url=self.download_info if self.is_direct else None,
            requested=self.user_supplied,
        )
        self.install_succeeded = True

802 

803 

def check_invalid_constraint_type(req: InstallRequirement) -> str:
    """Return a message describing why *req* is not valid as a constraint.

    An empty string means the requirement is acceptable; any non-empty
    result also emits a deprecation warning.
    """
    # Check for unsupported forms
    if not req.name:
        problem = "Unnamed requirements are not allowed as constraints"
    elif req.editable:
        problem = "Editable requirements are not allowed as constraints"
    elif req.extras:
        problem = "Constraints cannot have extras"
    else:
        problem = ""

    if problem:
        deprecated(
            reason=(
                "Constraints are only allowed to take the form of a package "
                "name and a version specifier. Other forms were originally "
                "permitted as an accident of the implementation, but were "
                "undocumented. The new implementation of the resolver no "
                "longer supports these forms."
            ),
            replacement="replacing the constraint with a requirement",
            # No plan yet for when the new resolver becomes default
            gone_in=None,
            issue=8210,
        )

    return problem

830 

831 

832def _has_option(options: Values, reqs: list[InstallRequirement], option: str) -> bool: 

833 if getattr(options, option, None): 

834 return True 

835 for req in reqs: 

836 if getattr(req, option, None): 

837 return True 

838 return False