Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/pip/_vendor/pkg_resources/__init__.py: 47%

1573 statements  

« prev     ^ index     » next       coverage.py v7.4.3, created at 2024-02-26 06:33 +0000

1""" 

2Package resource API 

3-------------------- 

4 

5A resource is a logical file contained within a package, or a logical 

6subdirectory thereof. The package resource API expects resource names 

7to have their path parts separated with ``/``, *not* whatever the local 

8path separator is. Do not use os.path operations to manipulate resource 

9names being passed into the API. 

10 

11The package resource API is designed to work with normal filesystem packages, 

12.egg files, and unpacked .egg files. It can also work in a limited way with 

13.zip files and with custom PEP 302 loaders that support the ``get_data()`` 

14method. 

15 

16This module is deprecated. Users are directed to :mod:`importlib.resources`, 

17:mod:`importlib.metadata` and :pypi:`packaging` instead. 

18""" 

19 

20import sys 

21import os 

22import io 

23import time 

24import re 

25import types 

26import zipfile 

27import zipimport 

28import warnings 

29import stat 

30import functools 

31import pkgutil 

32import operator 

33import platform 

34import collections 

35import plistlib 

36import email.parser 

37import errno 

38import tempfile 

39import textwrap 

40import inspect 

41import ntpath 

42import posixpath 

43import importlib 

44from pkgutil import get_importer 

45 

46try: 

47 import _imp 

48except ImportError: 

49 # Python 3.2 compatibility 

50 import imp as _imp 

51 

52try: 

53 FileExistsError 

54except NameError: 

55 FileExistsError = OSError 

56 

57# capture these to bypass sandboxing 

58from os import utime 

59 

60try: 

61 from os import mkdir, rename, unlink 

62 

63 WRITE_SUPPORT = True 

64except ImportError: 

65 # no write support, probably under GAE 

66 WRITE_SUPPORT = False 

67 

68from os import open as os_open 

69from os.path import isdir, split 

70 

71try: 

72 import importlib.machinery as importlib_machinery 

73 

74 # access attribute to force import under delayed import mechanisms. 

75 importlib_machinery.__name__ 

76except ImportError: 

77 importlib_machinery = None 

78 

79from pip._internal.utils._jaraco_text import ( 

80 yield_lines, 

81 drop_comment, 

82 join_continuation, 

83) 

84 

85from pip._vendor import platformdirs 

86from pip._vendor import packaging 

87 

88__import__('pip._vendor.packaging.version') 

89__import__('pip._vendor.packaging.specifiers') 

90__import__('pip._vendor.packaging.requirements') 

91__import__('pip._vendor.packaging.markers') 

92__import__('pip._vendor.packaging.utils') 

93 

# Hard floor on interpreter version; anything older cannot run this module.
if sys.version_info < (3, 5):
    raise RuntimeError("Python 3.5 or later is required")

# declare some globals that will be defined later to
# satisfy the linters.
require = None
working_set = None
add_activation_listener = None
resources_stream = None
cleanup_resources = None
resource_dir = None
resource_stream = None
set_extraction_path = None
resource_isdir = None
resource_string = None
iter_entry_points = None
resource_listdir = None
resource_filename = None
resource_exists = None
_distribution_finders = None
_namespace_handlers = None
_namespace_packages = None


# Emitted once at import time: the whole pkg_resources API is deprecated
# in favor of importlib.resources / importlib.metadata / packaging.
warnings.warn(
    "pkg_resources is deprecated as an API. "
    "See https://setuptools.pypa.io/en/latest/pkg_resources.html",
    DeprecationWarning,
    stacklevel=2
)


# Best-effort prefix matcher used to salvage a PEP 440-ish version
# (optional leading 'v', optional epoch, dotted release segments) from a
# version string that does not fully conform to PEP 440.
_PEP440_FALLBACK = re.compile(r"^v?(?P<safe>(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I)

127 

128 

class PEP440Warning(RuntimeWarning):
    """
    Raised as a warning when a version or specifier does not comply
    with PEP 440.
    """

134 

135 

# Backward-compatible alias; new code should use packaging.version.Version.
parse_version = packaging.version.Version

137 

138 

# name -> state-variable "type" tag ('dict', 'object', ...) used to pick the
# matching _sget_*/_sset_* helper in __getstate__/__setstate__.
_state_vars = {}


def _declare_state(vartype, **kw):
    """Create module globals from `kw` and register them as state variables.

    Each keyword becomes a module-level global, and its name is recorded in
    ``_state_vars`` under the given `vartype` so module state can later be
    snapshotted and restored.
    """
    module_globals = globals()
    for name, value in kw.items():
        module_globals[name] = value
        _state_vars[name] = vartype

145 

146 

def __getstate__():
    """Snapshot every registered state variable via its _sget_* helper."""
    g = globals()
    return {
        name: g['_sget_' + kind](g[name]) for name, kind in _state_vars.items()
    }

153 

154 

def __setstate__(state):
    """Restore module state captured by __getstate__; returns `state`."""
    g = globals()
    for name, value in state.items():
        g['_sset_' + _state_vars[name]](name, g[name], value)
    return state

160 

161 

def _sget_dict(val):
    """Snapshot a dict state variable as a shallow copy."""
    snapshot = val.copy()
    return snapshot

164 

165 

def _sset_dict(key, ob, state):
    """Restore a dict state variable in place from its snapshot."""
    ob.clear()
    ob.update(state)

169 

170 

def _sget_object(val):
    """Snapshot an object state variable via its __getstate__ hook."""
    return val.__getstate__()

173 

174 

def _sset_object(key, ob, state):
    """Restore an object state variable via its __setstate__ hook."""
    ob.__setstate__(state)

177 

178 

def _sget_none(*args):
    """No-op state getter: these variables are rebuilt, never pickled."""
    return None


# The setter is the same no-op; both names stay bound to one function,
# matching the original single-lambda definition.
_sset_none = _sget_none

180 

181 

def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of macOS required to *use* extensions produced by distutils, but for
    compatibility checks we want the version of macOS we are *running*.
    On macOS the version component of the platform tag is therefore
    replaced with the live OS version.

    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    """
    plat = get_build_platform()
    match = macosVersionString.match(plat)
    if match is None or sys.platform != "darwin":
        return plat
    try:
        return 'macosx-%s-%s' % ('.'.join(_macos_vers()[:2]), match.group(3))
    except ValueError:
        # not macOS after all; keep the build platform string
        return plat

204 

205 

# Public API surface of pkg_resources; anything not listed here is an
# implementation detail.
__all__ = [
    # Basic resource access and distribution/entry point discovery
    'require',
    'run_script',
    'get_provider',
    'get_distribution',
    'load_entry_point',
    'get_entry_map',
    'get_entry_info',
    'iter_entry_points',
    'resource_string',
    'resource_stream',
    'resource_filename',
    'resource_listdir',
    'resource_exists',
    'resource_isdir',
    # Environmental control
    'declare_namespace',
    'working_set',
    'add_activation_listener',
    'find_distributions',
    'set_extraction_path',
    'cleanup_resources',
    'get_default_cache',
    # Primary implementation classes
    'Environment',
    'WorkingSet',
    'ResourceManager',
    'Distribution',
    'Requirement',
    'EntryPoint',
    # Exceptions
    'ResolutionError',
    'VersionConflict',
    'DistributionNotFound',
    'UnknownExtra',
    'ExtractionError',
    # Warnings
    'PEP440Warning',
    # Parsing functions and string utilities
    'parse_requirements',
    'parse_version',
    'safe_name',
    'safe_version',
    'get_platform',
    'compatible_platforms',
    'yield_lines',
    'split_sections',
    'safe_extra',
    'to_filename',
    'invalid_marker',
    'evaluate_marker',
    # filesystem utilities
    'ensure_directory',
    'normalize_path',
    # Distribution "precedence" constants
    'EGG_DIST',
    'BINARY_DIST',
    'SOURCE_DIST',
    'CHECKOUT_DIST',
    'DEVELOP_DIST',
    # "Provider" interfaces, implementations, and registration/lookup APIs
    'IMetadataProvider',
    'IResourceProvider',
    'FileMetadata',
    'PathMetadata',
    'EggMetadata',
    'EmptyProvider',
    'empty_provider',
    'NullProvider',
    'EggProvider',
    'DefaultProvider',
    'ZipProvider',
    'register_finder',
    'register_namespace_handler',
    'register_loader_type',
    'fixup_namespace_packages',
    'get_importer',
    # Warnings
    'PkgResourcesDeprecationWarning',
    # Deprecated/backward compatibility only
    'run_main',
    'AvailableDistributions',
]

290 

291 

class ResolutionError(Exception):
    """Abstract base for dependency resolution errors"""

    def __repr__(self):
        return '{}{!r}'.format(type(self).__name__, self.args)

297 

298 

299class VersionConflict(ResolutionError): 

300 """ 

301 An already-installed version conflicts with the requested version. 

302 

303 Should be initialized with the installed Distribution and the requested 

304 Requirement. 

305 """ 

306 

307 _template = "{self.dist} is installed but {self.req} is required" 

308 

309 @property 

310 def dist(self): 

311 return self.args[0] 

312 

313 @property 

314 def req(self): 

315 return self.args[1] 

316 

317 def report(self): 

318 return self._template.format(**locals()) 

319 

320 def with_context(self, required_by): 

321 """ 

322 If required_by is non-empty, return a version of self that is a 

323 ContextualVersionConflict. 

324 """ 

325 if not required_by: 

326 return self 

327 args = self.args + (required_by,) 

328 return ContextualVersionConflict(*args) 

329 

330 

class ContextualVersionConflict(VersionConflict):
    """
    A VersionConflict that also records, as a third argument, the set of
    requirements that required the installed Distribution.
    """

    _template = VersionConflict._template + ' by {self.required_by}'

    @property
    def required_by(self):
        # requirements that pulled in the conflicting Distribution
        return self.args[2]

342 

343 

class DistributionNotFound(ResolutionError):
    """A requested distribution was not found"""

    _template = (
        "The '{self.req}' distribution was not found "
        "and is required by {self.requirers_str}"
    )

    @property
    def req(self):
        # the unsatisfied Requirement
        return self.args[0]

    @property
    def requirers(self):
        # who asked for it (may be None/empty)
        return self.args[1]

    @property
    def requirers_str(self):
        """Comma-joined requirer names, or 'the application' if unknown."""
        if self.requirers:
            return ', '.join(self.requirers)
        return 'the application'

    def report(self):
        """Render the failure as a human-readable message."""
        return self._template.format(self=self)

    def __str__(self):
        return self.report()

371 

372 

class UnknownExtra(ResolutionError):
    """The distribution has no "extra feature" with the given name"""

375 

376 

# loader type -> factory that builds an IResourceProvider for a module
_provider_factories = {}

# 'major.minor' of the running interpreter, e.g. '3.8'
PY_MAJOR = '{}.{}'.format(*sys.version_info)

# Distribution "precedence" constants; higher values win when versions tie.
EGG_DIST = 3
BINARY_DIST = 2
SOURCE_DIST = 1
CHECKOUT_DIST = 0
DEVELOP_DIST = -1


def register_loader_type(loader_type, provider_factory):
    """Register `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    """
    _provider_factories[loader_type] = provider_factory

395 

396 

def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq, Requirement):
        # For a Requirement, the provider is the active distribution that
        # satisfies it, activating one via require() if necessary.
        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    try:
        module = sys.modules[moduleOrReq]
    except KeyError:
        # Not imported yet -- import it, then look it up again.
        __import__(moduleOrReq)
        module = sys.modules[moduleOrReq]
    loader = getattr(module, '__loader__', None)
    # Dispatch on the loader's type via the registered provider factories.
    return _find_adapter(_provider_factories, loader)(module)

408 

409 

def _macos_vers(_cache=[]):
    """Return the macOS version as a list of string components.

    The result (e.g. ``['10', '15', '7']``) is memoized in the mutable
    default ``_cache`` (deliberate use of a shared default) so the platform
    is only probed once per process. When :func:`platform.mac_ver` reports
    an empty version (e.g. under MacPorts builds of Python), falls back to
    reading ``SystemVersion.plist`` directly.
    """
    if not _cache:
        version = platform.mac_ver()[0]
        # fallback for MacPorts
        if version == '':
            plist = '/System/Library/CoreServices/SystemVersion.plist'
            if os.path.exists(plist):
                if hasattr(plistlib, 'readPlist'):
                    # Python < 3.9 API; removed from plistlib in 3.9.
                    plist_content = plistlib.readPlist(plist)
                else:
                    # Modern replacement; the original code silently skipped
                    # the fallback here on Python 3.9+.
                    with open(plist, 'rb') as fp:
                        plist_content = plistlib.load(fp)
                if 'ProductVersion' in plist_content:
                    version = plist_content['ProductVersion']

        _cache.append(version.split('.'))
    return _cache[0]

424 

425 

def _macos_arch(machine):
    """Map a macOS machine string to the arch name used in platform tags."""
    aliases = {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}
    return aliases.get(machine, machine)

428 

429 

def get_build_platform():
    """Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and macOS.
    """
    from sysconfig import get_platform

    plat = get_platform()
    if sys.platform != "darwin" or plat.startswith('macosx-'):
        return plat
    try:
        version = _macos_vers()
        machine = os.uname()[4].replace(" ", "_")
        return "macosx-%d.%d-%s" % (
            int(version[0]),
            int(version[1]),
            _macos_arch(machine),
        )
    except ValueError:
        # a non-Mac darwin system: fall back to the sysconfig value
        return plat

453 

454 

# Captures (major, minor, arch) from tags like 'macosx-10.15-x86_64'.
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
# Legacy pre-setuptools-0.6 tags like 'darwin-8.11.0-Power_Macintosh':
# captures (major, minor, patch, arch).
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
# XXX backward compat
get_platform = get_build_platform

459 

460 

def compatible_platforms(provided, required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    # easy case: unknown platform on either side, or an exact match
    if provided is None or required is None or provided == required:
        return True

    # macOS special cases
    reqMac = macosVersionString.match(required)
    if not reqMac:
        # XXX Linux and other platforms' special cases should go here
        return False

    provMac = macosVersionString.match(provided)
    if not provMac:
        # Backwards compatibility for packages built before setuptools 0.6:
        # those used a 'darwin-x.y.z-arch' tag rather than the macOS
        # designation used by everything built afterwards.
        provDarwin = darwinVersionString.match(provided)
        if not provDarwin:
            # egg isn't macOS or legacy darwin
            return False
        dversion = int(provDarwin.group(1))
        macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
        return bool(
            dversion == 7
            and macosversion >= "10.3"
            or dversion == 8
            and macosversion >= "10.4"
        )

    # require the same major version and machine type
    if provMac.group(1) != reqMac.group(1) or provMac.group(3) != reqMac.group(3):
        return False

    # the required OS minor update must be >= the provided one
    return int(provMac.group(2)) <= int(reqMac.group(2))

508 

509 

def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    # Scrub the *caller's* global namespace down to just __name__, so the
    # script executes as if it were a standalone main module in that frame.
    ns = sys._getframe(1).f_globals
    name = ns['__name__']
    ns.clear()
    ns['__name__'] = name
    require(dist_spec)[0].run_script(script_name, ns)


# backward compatibility
run_main = run_script

521 

522 

def get_distribution(dist):
    """Return a current distribution object for a Requirement or string"""
    # Normalize: string -> Requirement -> Distribution.
    if isinstance(dist, str):
        dist = Requirement.parse(dist)
    if isinstance(dist, Requirement):
        dist = get_provider(dist)
    if isinstance(dist, Distribution):
        return dist
    raise TypeError("Expected string, Requirement, or Distribution", dist)

532 

533 

def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    distribution = get_distribution(dist)
    return distribution.load_entry_point(group, name)

537 

538 

def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    distribution = get_distribution(dist)
    return distribution.get_entry_map(group)

542 

543 

def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    distribution = get_distribution(dist)
    return distribution.get_entry_info(group, name)

547 

548 

class IMetadataProvider:
    """Interface for objects exposing a distribution's metadata files."""

    def has_metadata(name):
        """Return whether the distribution provides metadata under `name`."""

    def get_metadata(name):
        """Return the named metadata resource as a string."""

    def get_metadata_lines(name):
        """Yield the named metadata resource as non-blank, non-comment lines.

        Each line has leading and trailing whitespace stripped; lines whose
        first non-blank character is ``#`` are omitted."""

    def metadata_isdir(name):
        """Return whether the named metadata is a directory (cf. ``os.path.isdir()``)."""

    def metadata_listdir(name):
        """Return the metadata names in the directory (cf. ``os.listdir()``)."""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary."""

570 

571 

class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources"""

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Return whether the package contains the named resource."""

    def resource_isdir(resource_name):
        """Return whether the named resource is a directory (cf. ``os.path.isdir()``)."""

    def resource_listdir(resource_name):
        """Return the resource names in the directory (cf. ``os.listdir()``)."""

598 

599 

class WorkingSet:
    """A collection of active distributions on sys.path (or a similar list)"""

    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        # Path entries in search order; may contain duplicates (like sys.path).
        self.entries = []
        # path entry -> list of project keys found on that entry
        self.entry_keys = {}
        # project key -> the active Distribution for that project
        self.by_key = {}
        # PEP 503 normalized name -> canonical key used in by_key
        self.normalized_to_canonical_keys = {}
        # callables notified whenever a new distribution is activated
        self.callbacks = []

        if entries is None:
            entries = sys.path

        for entry in entries:
            self.add_entry(entry)

    @classmethod
    def _build_master(cls):
        """
        Prepare the master working set.
        """
        ws = cls()
        try:
            from __main__ import __requires__
        except ImportError:
            # The main program does not list any requirements
            return ws

        # ensure the requirements are met
        try:
            ws.require(__requires__)
        except VersionConflict:
            # sys.path defaults conflict; rebuild from the requirements alone
            return cls._build_from_requirements(__requires__)

        return ws

    @classmethod
    def _build_from_requirements(cls, req_spec):
        """
        Build a working set from a requirement spec. Rewrites sys.path.
        """
        # try it without defaults already on sys.path
        # by starting with an empty path
        ws = cls([])
        reqs = parse_requirements(req_spec)
        dists = ws.resolve(reqs, Environment())
        for dist in dists:
            ws.add(dist)

        # add any missing entries from sys.path
        for entry in sys.path:
            if entry not in ws.entries:
                ws.add_entry(entry)

        # then copy back to sys.path
        sys.path[:] = ws.entries
        return ws

    def add_entry(self, entry):
        """Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry, True)`` is used to find distributions
        corresponding to the path entry, and they are added. `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        equal ``sys.path``.)
        """
        self.entry_keys.setdefault(entry, [])
        self.entries.append(entry)
        for dist in find_distributions(entry, True):
            # insert=False: the entry was just appended above
            self.add(dist, entry, False)

    def __contains__(self, dist):
        """True if `dist` is the active distribution for its project"""
        return self.by_key.get(dist.key) == dist

    def find(self, req):
        """Find a distribution matching requirement `req`

        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`. But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        """
        dist = self.by_key.get(req.key)

        if dist is None:
            # retry through the PEP 503 normalized-name mapping
            canonical_key = self.normalized_to_canonical_keys.get(req.key)

            if canonical_key is not None:
                req.key = canonical_key
                dist = self.by_key.get(canonical_key)

        if dist is not None and dist not in req:
            # XXX add more info
            raise VersionConflict(dist, req)
        return dist

    def iter_entry_points(self, group, name=None):
        """Yield entry point objects from `group` matching `name`

        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        """
        return (
            entry
            for dist in self
            for entry in dist.get_entry_map(group).values()
            if name is None or name == entry.name
        )

    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        # Scrub the caller's globals to just __name__ so the script runs as
        # if it were the main program in that frame.
        ns = sys._getframe(1).f_globals
        name = ns['__name__']
        ns.clear()
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)

    def __iter__(self):
        """Yield distributions for non-duplicate projects in the working set

        The yield order is the order in which the items' path entries were
        added to the working set.
        """
        seen = {}
        for item in self.entries:
            if item not in self.entry_keys:
                # workaround a cache issue
                continue

            for key in self.entry_keys[item]:
                if key not in seen:
                    seen[key] = 1
                    yield self.by_key[key]

    def add(self, dist, entry=None, insert=True, replace=False):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set, unless `replace=True`.
        If it's added, any callbacks registered with the ``subscribe()`` method
        will be called.
        """
        if insert:
            dist.insert_on(self.entries, entry, replace=replace)

        if entry is None:
            entry = dist.location
        # record the key under both the explicit entry and the dist's location
        keys = self.entry_keys.setdefault(entry, [])
        keys2 = self.entry_keys.setdefault(dist.location, [])
        if not replace and dist.key in self.by_key:
            # ignore hidden distros
            return

        self.by_key[dist.key] = dist
        normalized_name = packaging.utils.canonicalize_name(dist.key)
        self.normalized_to_canonical_keys[normalized_name] = dist.key
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        # notify subscribers of the newly activated distribution
        self._added_new(dist)

    def resolve(
        self,
        requirements,
        env=None,
        installer=None,
        replace_conflicting=False,
        extras=None,
    ):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects. `env`,
        if supplied, should be an ``Environment`` instance. If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set. `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.

        Unless `replace_conflicting=True`, raises a VersionConflict exception
        if
        any requirements are found on the path that have the correct name but
        the wrong version. Otherwise, if an `installer` is supplied it will be
        invoked to obtain the correct version of the requirement and activate
        it.

        `extras` is a list of the extras to be used with these requirements.
        This is important because extra requirements may look like `my_req;
        extra = "my_extra"`, which would otherwise be interpreted as a purely
        optional requirement. Instead, we want to be able to assert that these
        requirements are truly required.
        """

        # set up the stack (reversed so pop(0) processes in original order)
        requirements = list(requirements)[::-1]
        # set of processed requirements
        processed = {}
        # key -> dist
        best = {}
        to_activate = []

        req_extras = _ReqExtras()

        # Mapping of requirement to set of distributions that required it;
        # useful for reporting info about conflicts.
        required_by = collections.defaultdict(set)

        while requirements:
            # process dependencies breadth-first
            req = requirements.pop(0)
            if req in processed:
                # Ignore cyclic or redundant dependencies
                continue

            if not req_extras.markers_pass(req, extras):
                # requirement is gated behind an extra that was not requested
                continue

            dist = self._resolve_dist(
                req, best, replace_conflicting, env, installer, required_by, to_activate
            )

            # push the new requirements onto the stack
            new_requirements = dist.requires(req.extras)[::-1]
            requirements.extend(new_requirements)

            # Register the new requirements needed by req
            for new_requirement in new_requirements:
                required_by[new_requirement].add(req.project_name)
                req_extras[new_requirement] = req.extras

            processed[req] = True

        # return list of distros to activate
        return to_activate

    def _resolve_dist(
        self, req, best, replace_conflicting, env, installer, required_by, to_activate
    ):
        # Resolve a single requirement against the cache of "best" matches,
        # the working set, and (if needed) the environment/installer.
        dist = best.get(req.key)
        if dist is None:
            # Find the best distribution and add it to the map
            dist = self.by_key.get(req.key)
            if dist is None or (dist not in req and replace_conflicting):
                ws = self
                if env is None:
                    if dist is None:
                        env = Environment(self.entries)
                    else:
                        # Use an empty environment and workingset to avoid
                        # any further conflicts with the conflicting
                        # distribution
                        env = Environment([])
                        ws = WorkingSet([])
                dist = best[req.key] = env.best_match(
                    req, ws, installer, replace_conflicting=replace_conflicting
                )
                if dist is None:
                    requirers = required_by.get(req, None)
                    raise DistributionNotFound(req, requirers)
            to_activate.append(dist)
        if dist not in req:
            # Oops, the "best" so far conflicts with a dependency
            dependent_req = required_by[req]
            raise VersionConflict(dist, req).with_context(dependent_req)
        return dist

    def find_plugins(self, plugin_env, full_env=None, installer=None, fallback=True):
        """Find all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            # add plugins+libs to sys.path
            map(working_set.add, distributions)
            # display errors
            print('Could not load', errors)

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        contains all currently-available distributions. If `full_env` is not
        supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies. `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        """

        plugin_projects = list(plugin_env)
        # scan project names in alphabetic order
        plugin_projects.sort()

        error_info = {}
        distributions = {}

        if full_env is None:
            env = Environment(self.entries)
            env += plugin_env
        else:
            env = full_env + plugin_env

        # Resolve against a shadow copy so this working set is not mutated.
        shadow_set = self.__class__([])
        # put all our entries in shadow_set
        list(map(shadow_set.add, self))

        for project_name in plugin_projects:
            for dist in plugin_env[project_name]:
                req = [dist.as_requirement()]

                try:
                    resolvees = shadow_set.resolve(req, env, installer)

                except ResolutionError as v:
                    # save error info
                    error_info[dist] = v
                    if fallback:
                        # try the next older version of project
                        continue
                    else:
                        # give up on this project, keep going
                        break

                else:
                    list(map(shadow_set.add, resolvees))
                    distributions.update(dict.fromkeys(resolvees))

                    # success, no need to try any more versions of this project
                    break

        distributions = list(distributions)
        distributions.sort()

        return distributions, error_info

    def require(self, *requirements):
        """Ensure that distributions matching `requirements` are activated

        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required. The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        """
        needed = self.resolve(parse_requirements(requirements))

        for dist in needed:
            self.add(dist)

        return needed

    def subscribe(self, callback, existing=True):
        """Invoke `callback` for all distributions

        If `existing=True` (default),
        call on all existing ones, as well.
        """
        if callback in self.callbacks:
            # already subscribed; never register a callback twice
            return
        self.callbacks.append(callback)
        if not existing:
            return
        for dist in self:
            callback(dist)

    def _added_new(self, dist):
        # Fan out activation of `dist` to every subscribed callback.
        for callback in self.callbacks:
            callback(dist)

    def __getstate__(self):
        # Shallow-copy all mutable state so unpickling can't alias this set.
        return (
            self.entries[:],
            self.entry_keys.copy(),
            self.by_key.copy(),
            self.normalized_to_canonical_keys.copy(),
            self.callbacks[:],
        )

    def __setstate__(self, e_k_b_n_c):
        entries, keys, by_key, normalized_to_canonical_keys, callbacks = e_k_b_n_c
        self.entries = entries[:]
        self.entry_keys = keys.copy()
        self.by_key = by_key.copy()
        self.normalized_to_canonical_keys = normalized_to_canonical_keys.copy()
        self.callbacks = callbacks[:]

1009 

1010 

1011class _ReqExtras(dict): 

1012 """ 

1013 Map each requirement to the extras that demanded it. 

1014 """ 

1015 

1016 def markers_pass(self, req, extras=None): 

1017 """ 

1018 Evaluate markers for req against each extra that 

1019 demanded it. 

1020 

1021 Return False if the req has a marker and fails 

1022 evaluation. Otherwise, return True. 

1023 """ 

1024 extra_evals = ( 

1025 req.marker.evaluate({'extra': extra}) 

1026 for extra in self.get(req, ()) + (extras or (None,)) 

1027 ) 

1028 return not req.marker or any(extra_evals) 

1029 

1030 

class Environment:
    """Searchable snapshot of distributions on a search path"""

    def __init__(
        self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR
    ):
        """Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.

        `platform` is an optional string naming the platform that
        platform-specific distributions must be compatible with; it defaults
        to the current platform.  `python` is an optional string naming the
        desired Python version (e.g. ``'3.6'``); it defaults to the running
        version.  Pass ``None`` for either to accept *all* distributions
        regardless of platform or Python version.
        """
        self._distmap = {}
        self.platform = platform
        self.python = python
        self.scan(search_path)

    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?

        The distribution must match the platform and python version
        requirements specified when this environment was created, or False
        is returned.
        """
        # A mismatch only matters when both sides declare a Python version.
        if (
            self.python is not None
            and dist.py_version is not None
            and dist.py_version != self.python
        ):
            return False
        return compatible_platforms(dist.platform, self.platform)

    def remove(self, dist):
        """Remove `dist` from the environment"""
        self._distmap[dist.key].remove(dist)

    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment; `search_path`
        defaults to ``sys.path``.  Only distributions passing ``can_add()``
        (and carrying a version) actually end up in the environment.
        """
        entries = sys.path if search_path is None else search_path
        for entry in entries:
            for dist in find_distributions(entry):
                self.add(dist)

    def __getitem__(self, project_name):
        """Return a newest-to-oldest list of distributions for `project_name`

        Lookup is case-insensitive: all of a project's distributions are
        assumed to use the lowercased project name as their key.
        """
        return self._distmap.get(project_name.lower(), [])

    def add(self, dist):
        """Add `dist` if we ``can_add()`` it and it has not already been added"""
        if not (self.can_add(dist) and dist.has_version()):
            return
        bucket = self._distmap.setdefault(dist.key, [])
        if dist not in bucket:
            bucket.append(dist)
            # Keep newest-first ordering for __getitem__/best_match.
            bucket.sort(key=operator.attrgetter('hashcmp'), reverse=True)

    def best_match(self, req, working_set, installer=None, replace_conflicting=False):
        """Find distribution best matching `req` and usable on `working_set`

        First asks ``working_set.find(req)`` whether a suitable distribution
        is already active (which may raise ``VersionConflict`` unless
        `replace_conflicting` is set).  Failing that, returns the newest
        matching distribution in this environment, and as a last resort the
        result of ``obtain(req, installer)``.
        """
        try:
            dist = working_set.find(req)
        except VersionConflict:
            if not replace_conflicting:
                raise
            dist = None
        if dist is not None:
            return dist
        for candidate in self[req.key]:
            if candidate in req:
                return candidate
        # try to download/install
        return self.obtain(req, installer)

    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)

        The base ``Environment`` simply delegates to ``installer`` (returning
        ``None`` when no installer is given).  Subclasses may override this
        hook to try other acquisition strategies first.
        """
        if installer is None:
            return None
        return installer(requirement)

    def __iter__(self):
        """Yield the unique project names of the available distributions"""
        for key in self._distmap:
            # Skip keys whose distribution list has been emptied by remove().
            if self[key]:
                yield key

    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        if isinstance(other, Distribution):
            self.add(other)
        elif isinstance(other, Environment):
            for project_name in other:
                for dist in other[project_name]:
                    self.add(dist)
        else:
            raise TypeError("Can't add %r to environment" % (other,))
        return self

    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        # The result accepts everything (no platform/python filtering).
        merged = self.__class__([], platform=None, python=None)
        merged += self
        merged += other
        return merged

1173 

1174 

# XXX backward compatibility: legacy alias for code written against the
# old ``AvailableDistributions`` name.
AvailableDistributions = Environment

1177 

1178 

class ExtractionError(RuntimeError):
    """Raised when extracting a resource to the egg cache fails.

    Instances expose three attributes describing the failure:

    manager
        The resource manager that raised this exception

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    """

1193 

1194 

class ResourceManager:
    """Manage resource extraction and packages"""

    # Base directory for extracted resources; None means "use
    # get_default_cache()".  Shared class-level default, overridden per
    # instance by set_extraction_path().
    extraction_path = None

    def __init__(self):
        # Maps each extraction target path -> 1; used both to detect that
        # extraction has happened (set_extraction_path refuses to run after)
        # and as a record for potential cleanup.
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        return get_provider(package_or_requirement).resource_isdir(resource_name)

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        return get_provider(package_or_requirement).get_resource_filename(
            self, resource_name
        )

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        return get_provider(package_or_requirement).get_resource_stream(
            self, resource_name
        )

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        return get_provider(package_or_requirement).get_resource_string(
            self, resource_name
        )

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        return get_provider(package_or_requirement).resource_listdir(resource_name)

    def extraction_error(self):
        """Give an error message for problems extracting file(s)"""

        # Must be called from inside an ``except`` block: the active
        # exception becomes ``original_error`` on the raised ExtractionError.
        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()

        # NOTE: the template is filled with format(**locals()); the local
        # names ``old_exc`` and ``cache_path`` must match the placeholders.
        tmpl = textwrap.dedent(
            """
            Can't extract file(s) to egg cache

            The following error occurred while trying to extract file(s)
            to the Python egg cache:

              {old_exc}

            The Python egg cache directory is currently set to:

              {cache_path}

            Perhaps your account does not have write access to this directory?
            You can change the cache directory by setting the PYTHON_EGG_CACHE
            environment variable to point to an accessible directory.
            """
        ).lstrip()
        err = ExtractionError(tmpl.format(**locals()))
        err.manager = self
        err.cache_path = cache_path
        err.original_error = old_exc
        raise err

    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist.  `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension.  `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        """
        extract_path = self.extraction_path or get_default_cache()
        target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
        try:
            # Directory creation that bypasses the setuptools sandbox.
            _bypass_ensure_directory(target_path)
        except Exception:
            # Converts any failure into a user-friendly ExtractionError.
            self.extraction_error()

        self._warn_unsafe_extraction_path(extract_path)

        self.cached_files[target_path] = 1
        return target_path

    @staticmethod
    def _warn_unsafe_extraction_path(path):
        """
        If the default extraction path is overridden and set to an insecure
        location, such as /tmp, it opens up an opportunity for an attacker to
        replace an extracted file with an unauthorized payload. Warn the user
        if a known insecure location is used.

        See Distribute #375 for more details.
        """
        if os.name == 'nt' and not path.startswith(os.environ['windir']):
            # On Windows, permissions are generally restrictive by default
            # and temp directories are not writable by other users, so
            # bypass the warning.
            return
        mode = os.stat(path).st_mode
        if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
            # Message is filled via format(**locals()); ``path`` must keep
            # its name for the placeholder to resolve.
            msg = (
                "Extraction path is writable by group/others "
                "and vulnerable to attack when "
                "used with get_resource_filename ({path}). "
                "Consider a more secure "
                "location (set with .set_extraction_path or the "
                "PYTHON_EGG_CACHE environment variable)."
            ).format(**locals())
            warnings.warn(msg, UserWarning)

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource.  They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """

        if os.name == 'posix':
            # Make the resource executable (add r-x bits, clear anything
            # above the permission bits).
            mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
            os.chmod(tempname, mode)

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``.  (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks.  See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``.  You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done.  There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        """
        if self.cached_files:
            raise ValueError("Can't change extraction path, files already extracted")

        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process.  This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # XXX
        # NOTE(review): intentionally unimplemented upstream — returns None,
        # not the documented list of un-removed names.

1371 

1372 

def get_default_cache():
    """
    Return the ``PYTHON_EGG_CACHE`` environment variable
    or a platform-relevant user cache dir for an app
    named "Python-Eggs".
    """
    explicit = os.environ.get('PYTHON_EGG_CACHE')
    if explicit:
        return explicit
    return platformdirs.user_cache_dir(appname='Python-Eggs')

1382 

1383 

def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    # Collapse each run of disallowed characters into one dash.
    sanitized = re.sub(r'[^A-Za-z0-9.]+', '-', name)
    return sanitized

1390 

1391 

def safe_version(version):
    """
    Convert an arbitrary string to a standard version string
    """
    try:
        # Preferred path: let packaging normalize a valid PEP 440 version.
        parsed = packaging.version.Version(version)
    except packaging.version.InvalidVersion:
        # Fallback: best-effort sanitization for legacy version strings.
        fallback = version.replace(' ', '.')
        return re.sub('[^A-Za-z0-9.]+', '-', fallback)
    return str(parsed)

1402 

1403 

def _forgiving_version(version):
    """Fallback when ``safe_version`` is not safe enough
    >>> parse_version(_forgiving_version('0.23ubuntu1'))
    <Version('0.23.dev0+sanitized.ubuntu1')>
    >>> parse_version(_forgiving_version('0.23-'))
    <Version('0.23.dev0+sanitized')>
    >>> parse_version(_forgiving_version('0.-_'))
    <Version('0.dev0+sanitized')>
    >>> parse_version(_forgiving_version('42.+?1'))
    <Version('42.dev0+sanitized.1')>
    >>> parse_version(_forgiving_version('hello world'))
    <Version('0.dev0+sanitized.hello.world')>
    """
    version = version.replace(' ', '.')
    # Split the input into a leading PEP 440-compatible prefix ("safe") and
    # whatever garbage follows ("rest").
    match = _PEP440_FALLBACK.search(version)
    if match is None:
        safe, rest = "0", version
    else:
        safe = match["safe"]
        rest = version[len(safe):]
    # The garbage survives only as a sanitized local-version segment.
    local = f"sanitized.{_safe_segment(rest)}".strip(".")
    return f"{safe}.dev0+{local}"

1427 

1428 

1429def _safe_segment(segment): 

1430 """Convert an arbitrary string into a safe segment""" 

1431 segment = re.sub('[^A-Za-z0-9.]+', '-', segment) 

1432 segment = re.sub('-[^A-Za-z0-9]+', '-', segment) 

1433 return re.sub(r'\.[^A-Za-z0-9]+', '.', segment).strip(".-") 

1434 

1435 

def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    """
    normalized = re.sub('[^A-Za-z0-9.-]+', '_', extra)
    return normalized.lower()

1443 

1444 

def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return '_'.join(name.split('-'))

1451 

1452 

def invalid_marker(text):
    """
    Validate text as a PEP 508 environment marker; return an exception
    if invalid or False otherwise.
    """
    try:
        evaluate_marker(text)
    except SyntaxError as e:
        # Strip location info so the exception is purely about the marker.
        e.filename = None
        e.lineno = None
        return e
    else:
        return False

1465 

1466 

def evaluate_marker(text, extra=None):
    """
    Evaluate a PEP 508 environment marker.
    Return a boolean indicating the marker result in this environment.
    Raise SyntaxError if marker is invalid.

    This implementation uses the 'pyparsing' module.
    """
    try:
        return packaging.markers.Marker(text).evaluate()
    except packaging.markers.InvalidMarker as err:
        # Historical API: invalid markers surface as SyntaxError.
        raise SyntaxError(err) from err

1480 

1481 

class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

    # Filled in by subclasses that know about eggs (see EggProvider);
    # remaining None means "no egg metadata available".
    egg_name = None
    egg_info = None
    loader = None

    def __init__(self, module):
        # The module's PEP 302 loader (if any) and the directory holding it;
        # all resource paths are resolved relative to module_path.
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))

    def get_resource_filename(self, manager, resource_name):
        # Base implementation: the resource is assumed to live directly on
        # the filesystem under module_path.
        return self._fn(self.module_path, resource_name)

    def get_resource_stream(self, manager, resource_name):
        # Wrap the raw bytes in an in-memory stream.
        return io.BytesIO(self.get_resource_string(manager, resource_name))

    def get_resource_string(self, manager, resource_name):
        # Despite the name, returns bytes (via the loader's get_data).
        return self._get(self._fn(self.module_path, resource_name))

    def has_resource(self, resource_name):
        return self._has(self._fn(self.module_path, resource_name))

    def _get_metadata_path(self, name):
        # Metadata lives under egg_info rather than module_path.
        return self._fn(self.egg_info, name)

    def has_metadata(self, name):
        if not self.egg_info:
            # NOTE: returns the falsy egg_info value itself (None or ''),
            # not False — callers rely only on truthiness.
            return self.egg_info

        path = self._get_metadata_path(name)
        return self._has(path)

    def get_metadata(self, name):
        if not self.egg_info:
            return ""
        path = self._get_metadata_path(name)
        value = self._get(path)
        try:
            return value.decode('utf-8')
        except UnicodeDecodeError as exc:
            # Include the path in the error message to simplify
            # troubleshooting, and without changing the exception type.
            exc.reason += ' in {} file at path: {}'.format(name, path)
            raise

    def get_metadata_lines(self, name):
        # Yields non-blank, non-comment lines of the metadata file.
        return yield_lines(self.get_metadata(name))

    def resource_isdir(self, resource_name):
        return self._isdir(self._fn(self.module_path, resource_name))

    def metadata_isdir(self, name):
        return self.egg_info and self._isdir(self._fn(self.egg_info, name))

    def resource_listdir(self, resource_name):
        return self._listdir(self._fn(self.module_path, resource_name))

    def metadata_listdir(self, name):
        if self.egg_info:
            return self._listdir(self._fn(self.egg_info, name))
        return []

    def run_script(self, script_name, namespace):
        # Execute a script stored in the egg's EGG-INFO/scripts directory
        # inside `namespace`.  The error message is built via
        # format(**locals()), so the local names must stay as-is.
        script = 'scripts/' + script_name
        if not self.has_metadata(script):
            raise ResolutionError(
                "Script {script!r} not found in metadata at {self.egg_info!r}".format(
                    **locals()
                ),
            )
        # Normalize all line endings to '\n' before compiling.
        script_text = self.get_metadata(script).replace('\r\n', '\n')
        script_text = script_text.replace('\r', '\n')
        script_filename = self._fn(self.egg_info, script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            # Script exists on the real filesystem: compile from the file so
            # tracebacks point at actual source.
            with open(script_filename) as fid:
                source = fid.read()
            code = compile(source, script_filename, 'exec')
            exec(code, namespace, namespace)
        else:
            # Zipped egg: seed linecache so tracebacks can still show the
            # script's source lines.
            from linecache import cache

            cache[script_filename] = (
                len(script_text),
                0,
                script_text.split('\n'),
                script_filename,
            )
            script_code = compile(script_text, script_filename, 'exec')
            exec(script_code, namespace, namespace)

    # The _has/_isdir/_listdir primitives must be supplied by a subclass
    # registered for the concrete loader type (see register_loader_type).
    def _has(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _isdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _listdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _fn(self, base, resource_name):
        # Join a '/'-separated resource name onto `base` using the local
        # path separator, after validating the resource path.
        self._validate_resource_path(resource_name)
        if resource_name:
            return os.path.join(base, *resource_name.split('/'))
        return base

    @staticmethod
    def _validate_resource_path(path):
        """
        Validate the resource paths according to the docs.
        https://setuptools.pypa.io/en/latest/pkg_resources.html#basic-resource-access

        >>> warned = getfixture('recwarn')
        >>> warnings.simplefilter('always')
        >>> vrp = NullProvider._validate_resource_path
        >>> vrp('foo/bar.txt')
        >>> bool(warned)
        False
        >>> vrp('../foo/bar.txt')
        >>> bool(warned)
        True
        >>> warned.clear()
        >>> vrp('/foo/bar.txt')
        >>> bool(warned)
        True
        >>> vrp('foo/../../bar.txt')
        >>> bool(warned)
        True
        >>> warned.clear()
        >>> vrp('foo/f../bar.txt')
        >>> bool(warned)
        False

        Windows path separators are straight-up disallowed.
        >>> vrp(r'\\foo/bar.txt')
        Traceback (most recent call last):
        ...
        ValueError: Use of .. or absolute path in a resource path \
is not allowed.

        >>> vrp(r'C:\\foo/bar.txt')
        Traceback (most recent call last):
        ...
        ValueError: Use of .. or absolute path in a resource path \
is not allowed.

        Blank values are allowed

        >>> vrp('')
        >>> bool(warned)
        False

        Non-string values are not.

        >>> vrp(None)
        Traceback (most recent call last):
        ...
        AttributeError: ...
        """
        # A path is invalid if it contains '..' components or is absolute
        # under either posix or Windows rules.
        invalid = (
            os.path.pardir in path.split(posixpath.sep)
            or posixpath.isabs(path)
            or ntpath.isabs(path)
        )
        if not invalid:
            return

        msg = "Use of .. or absolute path in a resource path is not allowed."

        # Aggressively disallow Windows absolute paths
        if ntpath.isabs(path) and not posixpath.isabs(path):
            raise ValueError(msg)

        # for compatibility, warn; in future
        # raise ValueError(msg)
        issue_warning(
            msg[:-1] + " and will raise exceptions in a future release.",
            DeprecationWarning,
        )

    def _get(self, path):
        # Delegate raw byte access to the loader's optional get_data() hook.
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
        )

1675 

1676 

# Fallback registration: any loader type without a more specific provider
# gets NullProvider (whose primitives raise NotImplementedError).
register_loader_type(object, NullProvider)

1678 

1679 

1680def _parents(path): 

1681 """ 

1682 yield all parents of path including path 

1683 """ 

1684 last = None 

1685 while path != last: 

1686 yield path 

1687 last = path 

1688 path, _ = os.path.split(path) 

1689 

1690 

class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self, module):
        super().__init__(module)
        self._setup_prefix()

    def _setup_prefix(self):
        # Metadata may be nested inside a "basket" of multiple eggs, so
        # search every ancestor of module_path (rather than the loader's
        # .archive) for the innermost enclosing egg.
        egg = next(filter(_is_egg_path, _parents(self.module_path)), None)
        if egg:
            self._set_egg(egg)

    def _set_egg(self, path):
        # Record the egg's identity and its conventional metadata location.
        self.egg_name = os.path.basename(path)
        self.egg_info = os.path.join(path, 'EGG-INFO')
        self.egg_root = path

1709 

1710 

class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    # Filesystem-backed primitives for the NullProvider protocol.
    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self, path):
        return os.path.isdir(path)

    def _listdir(self, path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        # Stream straight from the file instead of buffering in memory.
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _get(self, path):
        with open(path, 'rb') as stream:
            return stream.read()

    @classmethod
    def _register(cls):
        # Register this provider for the stdlib filesystem loaders; missing
        # loader names fall back to type(None), a harmless no-op key.
        for loader_name in ('SourceFileLoader', 'SourcelessFileLoader'):
            loader_cls = getattr(importlib_machinery, loader_name, type(None))
            register_loader_type(loader_cls, cls)

1739 

1740 

# Register DefaultProvider for the standard filesystem loader types.
DefaultProvider._register()

1742 

1743 

class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    module_path = None

    def _has(self, path):
        return False

    def _isdir(self, path):
        return False

    def _get(self, path):
        return ''

    def _listdir(self, path):
        return []

    def __init__(self):
        # Deliberately skip NullProvider.__init__: there is no module,
        # loader, or path backing this provider.
        pass

1759 

1760 

# Shared singleton used wherever a provider is required but no data exists.
empty_provider = EmptyProvider()

1762 

1763 

class ZipManifests(dict):
    """
    zip manifest builder
    """

    @classmethod
    def build(cls, path):
        """
        Build a dictionary similar to the zipimport directory
        caches, except instead of tuples, store ZipInfo objects.

        Use a platform-specific path separator (os.sep) for the path keys
        for compatibility with pypy on Windows.
        """
        with zipfile.ZipFile(path) as zfile:
            return {
                name.replace('/', os.sep): zfile.getinfo(name)
                for name in zfile.namelist()
            }

    load = build

1789 

1790 

class MemoizedZipManifests(ZipManifests):
    """
    Memoized zipfile manifests.
    """

    # Cache entry: the parsed manifest plus the archive mtime it reflects.
    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')

    def load(self, path):
        """
        Load a manifest at path or return a suitable manifest already loaded.
        """
        path = os.path.normpath(path)
        mtime = os.stat(path).st_mtime

        cached = self.get(path)
        if cached is None or cached.mtime != mtime:
            # Archive is new or changed on disk: rebuild and re-cache.
            cached = self.manifest_mod(self.build(path), mtime)
            self[path] = cached

        return cached.manifest

1810 

1811 

class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""

    # Lazily-built list of resources to extract eagerly; None = not yet read.
    eagers = None
    # Shared, mtime-aware cache of parsed zip directories.
    _zip_manifests = MemoizedZipManifests()

    def __init__(self, module):
        super().__init__(module)
        # Archive path plus separator; prefix of every virtual file path.
        self.zip_pre = self.loader.archive + os.sep

    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        fspath = fspath.rstrip(os.sep)
        if fspath == self.loader.archive:
            return ''
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre) :]
        raise AssertionError("%s is not a subpath of %s" % (fspath, self.zip_pre))

    def _parts(self, zip_path):
        # Convert a zipfile subpath into an egg-relative path part list.
        # pseudo-fs path
        fspath = self.zip_pre + zip_path
        if fspath.startswith(self.egg_root + os.sep):
            return fspath[len(self.egg_root) + 1 :].split(os.sep)
        raise AssertionError("%s is not a subpath of %s" % (fspath, self.egg_root))

    @property
    def zipinfo(self):
        # Maps archive-relative paths (os.sep separated) -> ZipInfo.
        return self._zip_manifests.load(self.loader.archive)

    def get_resource_filename(self, manager, resource_name):
        if not self.egg_name:
            raise NotImplementedError(
                "resource_filename() only supported for .egg, not .zip"
            )
        # no need to lock for extraction, since we use temp names
        zip_path = self._resource_to_zip(resource_name)
        eagers = self._get_eager_resources()
        if '/'.join(self._parts(zip_path)) in eagers:
            # An eager resource forces extraction of the whole eager set
            # (e.g. native libs that must coexist on the filesystem).
            for name in eagers:
                self._extract_resource(manager, self._eager_to_zip(name))
        return self._extract_resource(manager, zip_path)

    @staticmethod
    def _get_date_and_size(zip_stat):
        size = zip_stat.file_size
        # ymdhms+wday, yday, dst
        date_time = zip_stat.date_time + (0, 0, -1)
        # 1980 offset already done
        timestamp = time.mktime(date_time)
        return timestamp, size

    # FIXME: 'ZipProvider._extract_resource' is too complex (12)
    def _extract_resource(self, manager, zip_path):  # noqa: C901
        # Directory case: extract every child recursively and return the
        # directory that contains them.  (Assumes the directory is non-empty;
        # `last` would be unbound otherwise.)
        if zip_path in self._index():
            for name in self._index()[zip_path]:
                last = self._extract_resource(manager, os.path.join(zip_path, name))
            # return the extracted directory name
            return os.path.dirname(last)

        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])

        if not WRITE_SUPPORT:
            raise IOError(
                '"os.rename" and "os.unlink" are not supported ' 'on this platform'
            )
        try:
            real_path = manager.get_cache_path(self.egg_name, self._parts(zip_path))

            if self._is_current(real_path, zip_path):
                return real_path

            # Write to a temp name in the target directory, then rename into
            # place — avoids readers ever seeing a partial file.
            outf, tmpnam = _mkstemp(
                ".$extract",
                dir=os.path.dirname(real_path),
            )
            os.write(outf, self.loader.get_data(zip_path))
            os.close(outf)
            # Stamp the temp file with the archive timestamp so future
            # _is_current() checks can compare mtimes.
            utime(tmpnam, (timestamp, timestamp))
            manager.postprocess(tmpnam, real_path)

            try:
                rename(tmpnam, real_path)

            except os.error:
                if os.path.isfile(real_path):
                    if self._is_current(real_path, zip_path):
                        # the file became current since it was checked above,
                        # so proceed.
                        return real_path
                    # Windows, del old file and retry
                    elif os.name == 'nt':
                        unlink(real_path)
                        rename(tmpnam, real_path)
                        return real_path
                raise

        except os.error:
            # report a user-friendly error
            manager.extraction_error()

        return real_path

    def _is_current(self, file_path, zip_path):
        """
        Return True if the file_path is current for this zip_path
        """
        # Cheap checks first (existence, size, mtime)...
        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
        if not os.path.isfile(file_path):
            return False
        stat = os.stat(file_path)
        if stat.st_size != size or stat.st_mtime != timestamp:
            return False
        # check that the contents match
        zip_contents = self.loader.get_data(zip_path)
        with open(file_path, 'rb') as f:
            file_contents = f.read()
        return zip_contents == file_contents

    def _get_eager_resources(self):
        # Read and cache the eager-resource lists from egg metadata.
        if self.eagers is None:
            eagers = []
            for name in ('native_libs.txt', 'eager_resources.txt'):
                if self.has_metadata(name):
                    eagers.extend(self.get_metadata_lines(name))
            self.eagers = eagers
        return self.eagers

    def _index(self):
        # Build (once) a directory index: parent subpath -> child names.
        # Cached on the instance as _dirindex.
        try:
            return self._dirindex
        except AttributeError:
            ind = {}
            for path in self.zipinfo:
                parts = path.split(os.sep)
                while parts:
                    parent = os.sep.join(parts[:-1])
                    if parent in ind:
                        # Ancestors already indexed; stop walking upward.
                        ind[parent].append(parts[-1])
                        break
                    else:
                        ind[parent] = [parts.pop()]
            self._dirindex = ind
            return ind

    def _has(self, fspath):
        zip_path = self._zipinfo_name(fspath)
        # Present either as a file entry or as an implied directory.
        return zip_path in self.zipinfo or zip_path in self._index()

    def _isdir(self, fspath):
        return self._zipinfo_name(fspath) in self._index()

    def _listdir(self, fspath):
        return list(self._index().get(self._zipinfo_name(fspath), ()))

    def _eager_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.egg_root, resource_name))

    def _resource_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.module_path, resource_name))

1974 

1975 

1976register_loader_type(zipimport.zipimporter, ZipProvider) 

1977 

1978 

class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    The only metadata this provider exposes is a single ``PKG-INFO`` file,
    served from the path given at construction time; every other data or
    metadata request is rejected.
    """

    def __init__(self, path):
        # Filesystem location of the PKG-INFO file.
        self.path = path

    def _get_metadata_path(self, name):
        # The same file backs every (accepted) metadata name.
        return self.path

    def has_metadata(self, name):
        # Only PKG-INFO is supported, and only when the backing file exists.
        return name == 'PKG-INFO' and os.path.isfile(self.path)

    def get_metadata(self, name):
        if name != 'PKG-INFO':
            raise KeyError("No metadata except PKG-INFO is available")

        with io.open(self.path, encoding='utf-8', errors="replace") as f:
            metadata = f.read()
        self._warn_on_replacement(metadata)
        return metadata

    def _warn_on_replacement(self, metadata):
        # U+FFFD indicates bytes that could not be decoded as UTF-8.
        replacement_char = '�'
        if replacement_char in metadata:
            warnings.warn(f"{self.path} could not be properly decoded in UTF-8")

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

2018 

2019 

class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(base_dir, project_name=dist_name, metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        # Directory containing the importable modules.
        self.module_path = path
        # Directory (or file) holding the distribution's metadata.
        self.egg_info = egg_info

2043 

2044 

class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""

        # Everything inside the archive is addressed past this prefix.
        self.zip_pre = importer.archive + os.sep
        self.loader = importer
        # A non-empty importer.prefix means the egg lives inside a
        # subdirectory of the archive.
        self.module_path = (
            os.path.join(importer.archive, importer.prefix)
            if importer.prefix
            else importer.archive
        )
        self._setup_prefix()

2058 

2059 

2060_declare_state('dict', _distribution_finders={}) 

2061 

2062 

def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `distribution_finder` is a callable that, passed a path
    item and the importer instance, yields ``Distribution`` instances found on
    that path item. See ``pkg_resources.find_on_path`` for an example."""
    # Later registrations for the same importer type replace earlier ones.
    _distribution_finders[importer_type] = distribution_finder

2071 

2072 

def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    importer = get_importer(path_item)
    # Dispatch to the finder registered for this importer's type.
    finder = _find_adapter(_distribution_finders, importer)
    return finder(importer, path_item, only)

2078 

2079 

def find_eggs_in_zip(importer, path_item, only=False):
    """
    Find eggs in zip files; possibly multiple nested eggs.
    """
    if importer.archive.endswith('.whl'):
        # wheels are not supported with this finder
        # they don't have PKG-INFO metadata, and won't ever contain eggs
        return
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        # don't yield nested distros
        return
    for child in metadata.resource_listdir(''):
        child_path = os.path.join(path_item, child)
        if _is_egg_path(child):
            # Recurse into nested eggs.
            yield from find_eggs_in_zip(zipimport.zipimporter(child_path), child_path)
        elif child.lower().endswith(('.dist-info', '.egg-info')):
            child_meta = EggMetadata(zipimport.zipimporter(child_path))
            child_meta.egg_info = child_path
            yield Distribution.from_location(path_item, child, child_meta)

2105 

2106 

2107register_finder(zipimport.zipimporter, find_eggs_in_zip) 

2108 

2109 

def find_nothing(importer, path_item, only=False):
    """Fallback finder that never yields any distributions."""
    return ()

2112 

2113 

2114register_finder(object, find_nothing) 

2115 

2116 

def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    path_item = _normalize_cached(path_item)

    if _is_unpacked_egg(path_item):
        # The path itself is an unpacked egg; it is the only distribution.
        egg_metadata = PathMetadata(path_item, os.path.join(path_item, 'EGG-INFO'))
        yield Distribution.from_filename(path_item, metadata=egg_metadata)
        return

    children = (os.path.join(path_item, child) for child in safe_listdir(path_item))

    # scan for .egg and .egg-info in directory
    for child in sorted(children):
        fullpath = os.path.join(path_item, child)
        factory = dist_factory(path_item, child, only)
        yield from factory(fullpath)

2136 

2137 

def dist_factory(path_item, entry, only):
    """Return a dist_factory for the given entry."""
    lower = entry.lower()
    is_egg_info = lower.endswith('.egg-info')
    # .dist-info only counts when it is an actual directory.
    is_dist_info = lower.endswith('.dist-info') and os.path.isdir(
        os.path.join(path_item, entry)
    )
    if is_egg_info or is_dist_info:
        return distributions_from_metadata
    if not only and _is_egg_path(entry):
        return find_distributions
    if not only and lower.endswith('.egg-link'):
        return resolve_egg_link
    return NoDists()

2155 

2156 

class NoDists:
    """
    >>> bool(NoDists())
    False

    >>> list(NoDists()('anything'))
    []
    """

    def __bool__(self):
        # A NoDists instance is always falsy.
        return False

    def __call__(self, fullpath):
        # Yield nothing, regardless of the path.
        return iter(())

2171 

2172 

def safe_listdir(path):
    """
    Attempt to list contents of path, but suppress some exceptions.
    """
    try:
        return os.listdir(path)
    except OSError as e:
        # Missing directories, non-directories, and permission problems
        # are expected and ignored; anything else propagates.
        ignorable = isinstance(e, (PermissionError, NotADirectoryError)) or e.errno in (
            errno.ENOTDIR,
            errno.EACCES,
            errno.ENOENT,
        )
        if not ignorable:
            raise
    return ()

2187 

2188 

def distributions_from_metadata(path):
    """Yield a develop-precedence distribution for the metadata at ``path``."""
    root = os.path.dirname(path)
    if os.path.isdir(path):
        if not os.listdir(path):
            # empty metadata dir; skip
            return
        metadata = PathMetadata(root, path)
    else:
        metadata = FileMetadata(path)
    yield Distribution.from_location(
        root,
        os.path.basename(path),
        metadata,
        precedence=DEVELOP_DIST,
    )

2205 

2206 

def non_empty_lines(path):
    """
    Yield non-empty lines from file at path
    """
    with open(path) as handle:
        yield from (line.strip() for line in handle if line.strip())

2216 

2217 

def resolve_egg_link(path):
    """
    Given a path to an .egg-link, resolve distributions
    present in the referenced path.
    """
    base = os.path.dirname(path)
    # Each non-empty line of the .egg-link names a path relative to it.
    resolved_paths = (os.path.join(base, ref) for ref in non_empty_lines(path))
    dist_groups = map(find_distributions, resolved_paths)
    return next(dist_groups, ())

2229 

2230 

if hasattr(pkgutil, 'ImpImporter'):
    # Legacy importer; only present on older Pythons.
    register_finder(pkgutil.ImpImporter, find_on_path)

register_finder(importlib_machinery.FileFinder, find_on_path)

2235 

# Registries for namespace-package support; preserved across module reloads.
_declare_state('dict', _namespace_handlers={})
_declare_state('dict', _namespace_packages={})

2238 

2239 

def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `namespace_handler` is a callable like this::

        def namespace_handler(importer, path_entry, moduleName, module):
            # return a path_entry to use for child packages

    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath. For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    """
    # Later registrations for the same importer type replace earlier ones.
    _namespace_handlers[importer_type] = namespace_handler

2256 

2257 

def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)

    Returns the subpath that was added to the package's ``__path__``, or
    ``None`` when the path item does not contribute to the namespace.
    """

    importer = get_importer(path_item)
    if importer is None:
        # No importer can handle this path item.
        return None

    # use find_spec (PEP 451) and fall-back to find_module (PEP 302)
    try:
        spec = importer.find_spec(packageName)
    except AttributeError:
        # capture warnings due to #1111
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            loader = importer.find_module(packageName)
    else:
        loader = spec.loader if spec else None

    if loader is None:
        return None
    module = sys.modules.get(packageName)
    if module is None:
        # Materialize an empty package module so a __path__ can be built up.
        module = sys.modules[packageName] = types.ModuleType(packageName)
        module.__path__ = []
        _set_parent_ns(packageName)
    elif not hasattr(module, '__path__'):
        raise TypeError("Not a package:", packageName)
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer, path_item, packageName, module)
    if subpath is not None:
        # Append the new subpath, import, then re-sort __path__ to match
        # the ordering of sys.path.
        path = module.__path__
        path.append(subpath)
        importlib.import_module(packageName)
        _rebuild_mod_path(path, packageName, module)
    return subpath

2293 

2294 

def _rebuild_mod_path(orig_path, package_name, module):
    """
    Rebuild module.__path__ ensuring that all entries are ordered
    corresponding to their sys.path order
    """
    normalized_sys_path = [_normalize_cached(entry) for entry in sys.path]
    # Number of trailing path components contributed by the package name.
    depth = package_name.count('.') + 1

    def _sys_path_rank(entry):
        # Workaround for #520 and #513: entries absent from sys.path sort last.
        try:
            return normalized_sys_path.index(entry)
        except ValueError:
            return float('inf')

    def _position(path):
        # Rank a __path__ entry by where its sys.path parent appears.
        parent = os.sep.join(path.split(os.sep)[:-depth])
        return _sys_path_rank(_normalize_cached(parent))

    new_path = [_normalize_cached(p) for p in sorted(orig_path, key=_position)]

    if isinstance(module.__path__, list):
        # Mutate in place so existing aliases of __path__ see the new order.
        module.__path__[:] = new_path
    else:
        module.__path__ = new_path

2327 

2328 

def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package

    Deprecated: PEP 420 implicit namespace packages should be used instead.
    """

    msg = (
        f"Deprecated call to `pkg_resources.declare_namespace({packageName!r})`.\n"
        "Implementing implicit namespace packages (as specified in PEP 420) "
        "is preferred to `pkg_resources.declare_namespace`. "
        "See https://setuptools.pypa.io/en/latest/references/"
        "keywords.html#keyword-namespace-packages"
    )
    warnings.warn(msg, DeprecationWarning, stacklevel=2)

    # Serialize against concurrent imports while mutating global registries.
    _imp.acquire_lock()
    try:
        if packageName in _namespace_packages:
            # Already declared; nothing to do.
            return

        path = sys.path
        parent, _, _ = packageName.rpartition('.')

        if parent:
            # Recursively declare the parent, then scan the parent's
            # __path__ rather than sys.path for this child.
            declare_namespace(parent)
            if parent not in _namespace_packages:
                __import__(parent)
            try:
                path = sys.modules[parent].__path__
            except AttributeError as e:
                raise TypeError("Not a package:", parent) from e

        # Track what packages are namespaces, so when new path items are added,
        # they can be updated
        _namespace_packages.setdefault(parent or None, []).append(packageName)
        _namespace_packages.setdefault(packageName, [])

        for path_item in path:
            # Ensure all the parent's path items are reflected in the child,
            # if they apply
            _handle_ns(packageName, path_item)

    finally:
        _imp.release_lock()

2370 

2371 

def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    # Hold the import lock while walking the namespace registry.
    _imp.acquire_lock()
    try:
        for child_pkg in _namespace_packages.get(parent, ()):
            subpath = _handle_ns(child_pkg, path_item)
            if subpath:
                # The child gained a subpath; propagate to its own children.
                fixup_namespace_packages(subpath, child_pkg)
    finally:
        _imp.release_lock()

2382 

2383 

def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""

    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    already_present = any(
        _normalize_cached(entry) == normalized for entry in module.__path__
    )
    # Only return the path if it's not already there
    if not already_present:
        return subpath
    return None

2395 

2396 

if hasattr(pkgutil, 'ImpImporter'):
    # Legacy importer; only present on older Pythons.
    register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)

register_namespace_handler(zipimport.zipimporter, file_ns_handler)
register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)

2402 

2403 

def null_ns_handler(importer, path_item, packageName, module):
    """Namespace handler that contributes no subpaths."""
    return None

2406 

2407 

2408register_namespace_handler(object, null_ns_handler) 

2409 

2410 

def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes"""
    patched = _cygwin_patch(filename)
    real = os.path.realpath(os.path.normpath(patched))
    return os.path.normcase(real)

2414 

2415 

2416def _cygwin_patch(filename): # pragma: nocover 

2417 """ 

2418 Contrary to POSIX 2008, on Cygwin, getcwd (3) contains 

2419 symlink components. Using 

2420 os.path.abspath() works around this limitation. A fix in os.getcwd() 

2421 would probably better, in Cygwin even more so, except 

2422 that this seems to be by design... 

2423 """ 

2424 return os.path.abspath(filename) if sys.platform == 'cygwin' else filename 

2425 

2426 

def _normalize_cached(filename, _cache={}):
    """Memoized normalize_path; the mutable default is the cache itself."""
    if filename not in _cache:
        _cache[filename] = normalize_path(filename)
    return _cache[filename]

2433 

2434 

def _is_egg_path(path):
    """
    Determine if given path appears to be an egg.
    """
    # Either a zipped .egg file or an unpacked .egg directory qualifies.
    return any(check(path) for check in (_is_zip_egg, _is_unpacked_egg))

2440 

2441 

2442def _is_zip_egg(path): 

2443 return ( 

2444 path.lower().endswith('.egg') 

2445 and os.path.isfile(path) 

2446 and zipfile.is_zipfile(path) 

2447 ) 

2448 

2449 

2450def _is_unpacked_egg(path): 

2451 """ 

2452 Determine if given path appears to be an unpacked egg. 

2453 """ 

2454 return path.lower().endswith('.egg') and os.path.isfile( 

2455 os.path.join(path, 'EGG-INFO', 'PKG-INFO') 

2456 ) 

2457 

2458 

2459def _set_parent_ns(packageName): 

2460 parts = packageName.split('.') 

2461 name = parts.pop() 

2462 if parts: 

2463 parent = '.'.join(parts) 

2464 setattr(sys.modules[parent], name, sys.modules[packageName]) 

2465 

2466 

# Matches a dotted module/group name, e.g. ``pkg.sub.mod``.
MODULE = re.compile(r"\w+(\.\w+)*$").match
# Parses egg filenames of the form ``name-ver-pyX.Y-platform``; every
# component after ``name`` is optional.
EGG_NAME = re.compile(
    r"""
    (?P<name>[^-]+) (
        -(?P<ver>[^-]+) (
            -py(?P<pyver>[^-]+) (
                -(?P<plat>.+)
            )?
        )?
    )?
    """,
    re.VERBOSE | re.IGNORECASE,
).match

2480 

2481 

class EntryPoint:
    """Object representing an advertised importable object"""

    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.name = name
        self.module_name = module_name
        # Dotted attribute path within the module (may be empty).
        self.attrs = tuple(attrs)
        # Extras required before the entry point may be loaded.
        self.extras = tuple(extras)
        self.dist = dist

    def __str__(self):
        # Render back to the "name = module:attrs [extras]" syntax.
        s = "%s = %s" % (self.name, self.module_name)
        if self.attrs:
            s += ':' + '.'.join(self.attrs)
        if self.extras:
            s += ' [%s]' % ','.join(self.extras)
        return s

    def __repr__(self):
        return "EntryPoint.parse(%r)" % str(self)

    def load(self, require=True, *args, **kwargs):
        """
        Require packages for this EntryPoint, then resolve it.
        """
        if not require or args or kwargs:
            warnings.warn(
                "Parameters to load are deprecated. Call .resolve and "
                ".require separately.",
                PkgResourcesDeprecationWarning,
                stacklevel=2,
            )
        if require:
            self.require(*args, **kwargs)
        return self.resolve()

    def resolve(self):
        """
        Resolve the entry point from its module and attrs.
        """
        module = __import__(self.module_name, fromlist=['__name__'], level=0)
        try:
            # Walk the dotted attribute chain starting at the module.
            return functools.reduce(getattr, self.attrs, module)
        except AttributeError as exc:
            raise ImportError(str(exc)) from exc

    def require(self, env=None, installer=None):
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)

        # Get the requirements for this entry point with all its extras and
        # then resolve them. We have to pass `extras` along when resolving so
        # that the working set knows what extras we want. Otherwise, for
        # dist-info distributions, the working set will assume that the
        # requirements for that extra are purely optional and skip over them.
        reqs = self.dist.requires(self.extras)
        items = working_set.resolve(reqs, env, installer, extras=self.extras)
        list(map(working_set.add, items))

    # Grammar for a single entry-point line: name, module, optional
    # ":attrs" suffix, and optional "[extras]" trailer.
    pattern = re.compile(
        r'\s*'
        r'(?P<name>.+?)\s*'
        r'=\s*'
        r'(?P<module>[\w.]+)\s*'
        r'(:\s*(?P<attr>[\w.]+))?\s*'
        r'(?P<extras>\[.*\])?\s*$'
    )

    @classmethod
    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1, extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        m = cls.pattern.match(src)
        if not m:
            msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
            raise ValueError(msg, src)
        res = m.groupdict()
        extras = cls._parse_extras(res['extras'])
        attrs = res['attr'].split('.') if res['attr'] else ()
        return cls(res['name'], res['module'], attrs, extras, dist)

    @classmethod
    def _parse_extras(cls, extras_spec):
        if not extras_spec:
            return ()
        # Parse "[a,b]" by attaching it to a dummy requirement name.
        req = Requirement.parse('x' + extras_spec)
        if req.specs:
            raise ValueError()
        return req.extras

    @classmethod
    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        this = {}
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            if ep.name in this:
                raise ValueError("Duplicate entry point", group, ep.name)
            this[ep.name] = ep
        return this

    @classmethod
    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        if isinstance(data, dict):
            data = data.items()
        else:
            data = split_sections(data)
        maps = {}
        for group, lines in data:
            if group is None:
                # Lines before any [section] header are only legal if empty.
                if not lines:
                    continue
                raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            if group in maps:
                raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)
        return maps

2612 

2613 

def _version_from_file(lines):
    """
    Given an iterable of lines from a Metadata file, return
    the value of the Version field, if present, or None otherwise.
    """
    version_lines = (line for line in lines if line.lower().startswith('version:'))
    first = next(version_lines, '')
    _, _, value = first.partition(':')
    return safe_version(value.strip()) or None

2627 

2628 

class Distribution:
    """Wrap an actual or potential sys.path entry w/metadata"""

    # Name of the primary metadata file for this distribution type.
    PKG_INFO = 'PKG-INFO'

2633 

    def __init__(
        self,
        location=None,
        metadata=None,
        project_name=None,
        version=None,
        py_version=PY_MAJOR,
        platform=None,
        precedence=EGG_DIST,
    ):
        self.project_name = safe_name(project_name or 'Unknown')
        if version is not None:
            # Only set when explicitly provided; otherwise the ``version``
            # property lazily reads it from metadata.
            self._version = safe_version(version)
        self.py_version = py_version
        self.platform = platform
        self.location = location
        self.precedence = precedence
        # Fall back to an empty provider when no metadata is supplied.
        self._provider = metadata or empty_provider

2652 

2653 @classmethod 

2654 def from_location(cls, location, basename, metadata=None, **kw): 

2655 project_name, version, py_version, platform = [None] * 4 

2656 basename, ext = os.path.splitext(basename) 

2657 if ext.lower() in _distributionImpl: 

2658 cls = _distributionImpl[ext.lower()] 

2659 

2660 match = EGG_NAME(basename) 

2661 if match: 

2662 project_name, version, py_version, platform = match.group( 

2663 'name', 'ver', 'pyver', 'plat' 

2664 ) 

2665 return cls( 

2666 location, 

2667 metadata, 

2668 project_name=project_name, 

2669 version=version, 

2670 py_version=py_version, 

2671 platform=platform, 

2672 **kw, 

2673 )._reload_version() 

2674 

    def _reload_version(self):
        # Hook for subclasses that re-derive the version after construction;
        # the base implementation is a no-op.
        return self

2677 

    @property
    def hashcmp(self):
        # Tuple used for hashing and ordering comparisons between
        # distributions; the version component parses leniently.
        return (
            self._forgiving_parsed_version,
            self.precedence,
            self.key,
            self.location,
            self.py_version or '',
            self.platform or '',
        )

2688 

    # All comparison and hashing behavior delegates to the ``hashcmp`` tuple.
    def __hash__(self):
        return hash(self.hashcmp)

    def __lt__(self, other):
        return self.hashcmp < other.hashcmp

    def __le__(self, other):
        return self.hashcmp <= other.hashcmp

    def __gt__(self, other):
        return self.hashcmp > other.hashcmp

    def __ge__(self, other):
        return self.hashcmp >= other.hashcmp

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            # It's not a Distribution, so they are not equal
            return False
        return self.hashcmp == other.hashcmp

    def __ne__(self, other):
        return not self == other

2712 

2713 # These properties have to be lazy so that we don't have to load any 

2714 # metadata until/unless it's actually needed. (i.e., some distributions 

2715 # may not know their name or version without loading PKG-INFO) 

2716 

2717 @property 

2718 def key(self): 

2719 try: 

2720 return self._key 

2721 except AttributeError: 

2722 self._key = key = self.project_name.lower() 

2723 return key 

2724 

    @property
    def parsed_version(self):
        # Parse (and cache) the version; when invalid, annotate the
        # exception with the package name before re-raising.
        if not hasattr(self, "_parsed_version"):
            try:
                self._parsed_version = parse_version(self.version)
            except packaging.version.InvalidVersion as ex:
                info = f"(package: {self.project_name})"
                if hasattr(ex, "add_note"):
                    ex.add_note(info)  # PEP 678
                    raise
                raise packaging.version.InvalidVersion(f"{str(ex)} {info}") from None

        return self._parsed_version

2738 

    @property
    def _forgiving_parsed_version(self):
        # Like ``parsed_version`` but, instead of raising on an invalid
        # version, substitutes a best-effort parse and emits a
        # DeprecationWarning.
        try:
            return self.parsed_version
        except packaging.version.InvalidVersion as ex:
            self._parsed_version = parse_version(_forgiving_version(self.version))

            notes = "\n".join(getattr(ex, "__notes__", []))  # PEP 678
            msg = f"""!!\n\n
            *************************************************************************
            {str(ex)}\n{notes}

            This is a long overdue deprecation.
            For the time being, `pkg_resources` will use `{self._parsed_version}`
            as a replacement to avoid breaking existing environments,
            but no future compatibility is guaranteed.

            If you maintain package {self.project_name} you should implement
            the relevant changes to adequate the project to PEP 440 immediately.
            *************************************************************************
            \n\n!!
            """
            warnings.warn(msg, DeprecationWarning)

            return self._parsed_version

2764 

    @property
    def version(self):
        try:
            return self._version
        except AttributeError as e:
            # Lazily read the version out of the metadata file.
            version = self._get_version()
            if version is None:
                path = self._get_metadata_path_for_display(self.PKG_INFO)
                msg = ("Missing 'Version:' header and/or {} file at path: {}").format(
                    self.PKG_INFO, path
                )
                raise ValueError(msg, self) from e

            return version

2779 

    @property
    def _dep_map(self):
        """
        A map of extra to its list of (direct) requirements
        for this distribution, including the null extra.
        """
        try:
            return self.__dep_map
        except AttributeError:
            # Build once, filtering on environment markers, then cache.
            self.__dep_map = self._filter_extras(self._build_dep_map())
        return self.__dep_map

2791 

2792 @staticmethod 

2793 def _filter_extras(dm): 

2794 """ 

2795 Given a mapping of extras to dependencies, strip off 

2796 environment markers and filter out any dependencies 

2797 not matching the markers. 

2798 """ 

2799 for extra in list(filter(None, dm)): 

2800 new_extra = extra 

2801 reqs = dm.pop(extra) 

2802 new_extra, _, marker = extra.partition(':') 

2803 fails_marker = marker and ( 

2804 invalid_marker(marker) or not evaluate_marker(marker) 

2805 ) 

2806 if fails_marker: 

2807 reqs = [] 

2808 new_extra = safe_extra(new_extra) or None 

2809 

2810 dm.setdefault(new_extra, []).extend(reqs) 

2811 return dm 

2812 

2813 def _build_dep_map(self): 

2814 dm = {} 

2815 for name in 'requires.txt', 'depends.txt': 

2816 for extra, reqs in split_sections(self._get_metadata(name)): 

2817 dm.setdefault(extra, []).extend(parse_requirements(reqs)) 

2818 return dm 

2819 

2820 def requires(self, extras=()): 

2821 """List of Requirements needed for this distro if `extras` are used""" 

2822 dm = self._dep_map 

2823 deps = [] 

2824 deps.extend(dm.get(None, ())) 

2825 for ext in extras: 

2826 try: 

2827 deps.extend(dm[safe_extra(ext)]) 

2828 except KeyError as e: 

2829 raise UnknownExtra( 

2830 "%s has no such extra feature %r" % (self, ext) 

2831 ) from e 

2832 return deps 

2833 

2834 def _get_metadata_path_for_display(self, name): 

2835 """ 

2836 Return the path to the given metadata file, if available. 

2837 """ 

2838 try: 

2839 # We need to access _get_metadata_path() on the provider object 

2840 # directly rather than through this class's __getattr__() 

2841 # since _get_metadata_path() is marked private. 

2842 path = self._provider._get_metadata_path(name) 

2843 

2844 # Handle exceptions e.g. in case the distribution's metadata 

2845 # provider doesn't support _get_metadata_path(). 

2846 except Exception: 

2847 return '[could not detect]' 

2848 

2849 return path 

2850 

2851 def _get_metadata(self, name): 

2852 if self.has_metadata(name): 

2853 for line in self.get_metadata_lines(name): 

2854 yield line 

2855 

2856 def _get_version(self): 

2857 lines = self._get_metadata(self.PKG_INFO) 

2858 version = _version_from_file(lines) 

2859 

2860 return version 

2861 

    def activate(self, path=None, replace=False):
        """Ensure distribution is importable on `path` (default=sys.path)"""
        if path is None:
            path = sys.path
        self.insert_on(path, replace=replace)
        if path is sys.path:
            # Only fix up namespace packages when mutating the real sys.path.
            fixup_namespace_packages(self.location)
            for pkg in self._get_metadata('namespace_packages.txt'):
                if pkg in sys.modules:
                    # Re-declare so the freshly inserted entry is honored.
                    declare_namespace(pkg)

2872 

2873 def egg_name(self): 

2874 """Return what this distribution's standard .egg filename should be""" 

2875 filename = "%s-%s-py%s" % ( 

2876 to_filename(self.project_name), 

2877 to_filename(self.version), 

2878 self.py_version or PY_MAJOR, 

2879 ) 

2880 

2881 if self.platform: 

2882 filename += '-' + self.platform 

2883 return filename 

2884 

2885 def __repr__(self): 

2886 if self.location: 

2887 return "%s (%s)" % (self, self.location) 

2888 else: 

2889 return str(self) 

2890 

2891 def __str__(self): 

2892 try: 

2893 version = getattr(self, 'version', None) 

2894 except ValueError: 

2895 version = None 

2896 version = version or "[unknown version]" 

2897 return "%s %s" % (self.project_name, version) 

2898 

2899 def __getattr__(self, attr): 

2900 """Delegate all unrecognized public attributes to .metadata provider""" 

2901 if attr.startswith('_'): 

2902 raise AttributeError(attr) 

2903 return getattr(self._provider, attr) 

2904 

2905 def __dir__(self): 

2906 return list( 

2907 set(super(Distribution, self).__dir__()) 

2908 | set(attr for attr in self._provider.__dir__() if not attr.startswith('_')) 

2909 ) 

2910 

2911 @classmethod 

2912 def from_filename(cls, filename, metadata=None, **kw): 

2913 return cls.from_location( 

2914 _normalize_cached(filename), os.path.basename(filename), metadata, **kw 

2915 ) 

2916 

2917 def as_requirement(self): 

2918 """Return a ``Requirement`` that matches this distribution exactly""" 

2919 if isinstance(self.parsed_version, packaging.version.Version): 

2920 spec = "%s==%s" % (self.project_name, self.parsed_version) 

2921 else: 

2922 spec = "%s===%s" % (self.project_name, self.parsed_version) 

2923 

2924 return Requirement.parse(spec) 

2925 

2926 def load_entry_point(self, group, name): 

2927 """Return the `name` entry point of `group` or raise ImportError""" 

2928 ep = self.get_entry_info(group, name) 

2929 if ep is None: 

2930 raise ImportError("Entry point %r not found" % ((group, name),)) 

2931 return ep.load() 

2932 

2933 def get_entry_map(self, group=None): 

2934 """Return the entry point map for `group`, or the full entry map""" 

2935 try: 

2936 ep_map = self._ep_map 

2937 except AttributeError: 

2938 ep_map = self._ep_map = EntryPoint.parse_map( 

2939 self._get_metadata('entry_points.txt'), self 

2940 ) 

2941 if group is not None: 

2942 return ep_map.get(group, {}) 

2943 return ep_map 

2944 

2945 def get_entry_info(self, group, name): 

2946 """Return the EntryPoint object for `group`+`name`, or ``None``""" 

2947 return self.get_entry_map(group).get(name) 

2948 

    # FIXME: 'Distribution.insert_on' is too complex (13)
    def insert_on(self, path, loc=None, replace=False):  # noqa: C901
        """Ensure self.location is on path

        If replace=False (default):
            - If location is already in path anywhere, do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent.
              - Else: add to the end of path.
        If replace=True:
            - If location is already on path anywhere (not eggs)
              or higher priority than its parent (eggs)
              do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent,
                removing any lower-priority entries.
              - Else: add it to the front of path.
        """

        loc = loc or self.location
        if not loc:
            # Metadata-only distribution with no location: nothing to insert.
            return

        nloc = _normalize_cached(loc)
        bdir = os.path.dirname(nloc)
        # Normalized shadow of `path`; comparisons use npath while mutations
        # are applied to both so `path` keeps the caller's original strings.
        npath = [(p and _normalize_cached(p) or p) for p in path]

        for p, item in enumerate(npath):
            if item == nloc:
                if replace:
                    break
                else:
                    # don't modify path (even removing duplicates) if
                    # found and not replace
                    return
            elif item == bdir and self.precedence == EGG_DIST:
                # if it's an .egg, give it precedence over its directory
                # UNLESS it's already been added to sys.path and replace=False
                if (not replace) and nloc in npath[p:]:
                    return
                if path is sys.path:
                    self.check_version_conflict()
                path.insert(p, loc)
                npath.insert(p, nloc)
                break
        else:
            # Not found anywhere: append for normal activation, prepend when
            # the caller asked for higher priority (replace=True).
            if path is sys.path:
                self.check_version_conflict()
            if replace:
                path.insert(0, loc)
            else:
                path.append(loc)
            return

        # p is the spot where we found or inserted loc; now remove duplicates
        while True:
            try:
                np = npath.index(nloc, p + 1)
            except ValueError:
                break
            else:
                del npath[np], path[np]
                # ha!
                p = np

        return

3017 

3018 def check_version_conflict(self): 

3019 if self.key == 'setuptools': 

3020 # ignore the inevitable setuptools self-conflicts :( 

3021 return 

3022 

3023 nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) 

3024 loc = normalize_path(self.location) 

3025 for modname in self._get_metadata('top_level.txt'): 

3026 if ( 

3027 modname not in sys.modules 

3028 or modname in nsp 

3029 or modname in _namespace_packages 

3030 ): 

3031 continue 

3032 if modname in ('pkg_resources', 'setuptools', 'site'): 

3033 continue 

3034 fn = getattr(sys.modules[modname], '__file__', None) 

3035 if fn and ( 

3036 normalize_path(fn).startswith(loc) or fn.startswith(self.location) 

3037 ): 

3038 continue 

3039 issue_warning( 

3040 "Module %s was already imported from %s, but %s is being added" 

3041 " to sys.path" % (modname, fn, self.location), 

3042 ) 

3043 

3044 def has_version(self): 

3045 try: 

3046 self.version 

3047 except ValueError: 

3048 issue_warning("Unbuilt egg for " + repr(self)) 

3049 return False 

3050 except SystemError: 

3051 # TODO: remove this except clause when python/cpython#103632 is fixed. 

3052 return False 

3053 return True 

3054 

3055 def clone(self, **kw): 

3056 """Copy this distribution, substituting in any changed keyword args""" 

3057 names = 'project_name version py_version platform location precedence' 

3058 for attr in names.split(): 

3059 kw.setdefault(attr, getattr(self, attr, None)) 

3060 kw.setdefault('metadata', self._provider) 

3061 return self.__class__(**kw) 

3062 

3063 @property 

3064 def extras(self): 

3065 return [dep for dep in self._dep_map if dep] 

3066 

3067 

class EggInfoDistribution(Distribution):
    def _reload_version(self):
        """
        Prefer the version recorded in the metadata file over the one
        parsed from the filename.

        Packages installed by distutils (e.g. numpy or scipy) use an old
        ``safe_version``, so their version numbers can get mangled when
        converted to filenames (e.g. ``1.11.0.dev0+2329eae`` becomes
        ``1.11.0.dev0_2329eae``) and would not be parsed properly
        downstream by ``Distribution`` and ``safe_version``.
        """
        version_from_metadata = self._get_version()
        if version_from_metadata:
            self._version = version_from_metadata
        return self

3085 

3086 

class DistInfoDistribution(Distribution):
    """
    Wrap an actual or potential sys.path entry
    w/metadata, .dist-info style.
    """

    PKG_INFO = 'METADATA'
    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")

    @property
    def _parsed_pkg_info(self):
        """Parse and cache metadata"""
        try:
            return self._pkg_info
        except AttributeError:
            # Parse METADATA once; subsequent accesses hit the cached message.
            raw = self.get_metadata(self.PKG_INFO)
            self._pkg_info = email.parser.Parser().parsestr(raw)
            return self._pkg_info

    @property
    def _dep_map(self):
        """Cached mapping of extra name (or None) to its requirement list."""
        try:
            return self.__dep_map
        except AttributeError:
            self.__dep_map = self._compute_dependencies()
            return self.__dep_map

    def _compute_dependencies(self):
        """Recompute this distribution's dependencies."""
        dep_map = self.__dep_map = {None: []}

        requirements = []
        # Including any condition expressions
        for spec in self._parsed_pkg_info.get_all('Requires-Dist') or []:
            requirements.extend(parse_requirements(spec))

        def reqs_for_extra(extra):
            # Requirements whose marker is absent or true for this extra.
            for req in requirements:
                if not req.marker or req.marker.evaluate({'extra': extra}):
                    yield req

        common = types.MappingProxyType(dict.fromkeys(reqs_for_extra(None)))
        dep_map[None].extend(common)

        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
            extra_name = safe_extra(extra.strip())
            dep_map[extra_name] = [
                req for req in reqs_for_extra(extra) if req not in common
            ]

        return dep_map

3136 

3137 

# Map a metadata-directory suffix to the Distribution subclass that knows
# how to read that metadata layout.
_distributionImpl = {
    '.egg': Distribution,
    '.egg-info': EggInfoDistribution,
    '.dist-info': DistInfoDistribution,
}

3143 

3144 

def issue_warning(*args, **kw):
    """Emit a warning attributed to the first caller outside this module."""
    frame_level = 1
    this_module = globals()
    try:
        # find the first stack frame that is *not* code in
        # the pkg_resources module, to use for the warning
        while sys._getframe(frame_level).f_globals is this_module:
            frame_level += 1
    except ValueError:
        # Walked off the top of the stack; use the deepest frame reached.
        pass
    warnings.warn(stacklevel=frame_level + 1, *args, **kw)

3156 

3157 

def parse_requirements(strs):
    """
    Yield ``Requirement`` objects for each specification in `strs`.

    `strs` must be a string, or a (possibly-nested) iterable thereof.
    """
    # Pipeline: flatten to lines, strip comments, join continuations,
    # then parse each complete specification.
    lines = yield_lines(strs)
    without_comments = map(drop_comment, lines)
    joined = join_continuation(without_comments)
    return map(Requirement, joined)

3165 

3166 

class RequirementParseError(packaging.requirements.InvalidRequirement):
    """Compatibility wrapper for InvalidRequirement."""

3169 

3170 

class Requirement(packaging.requirements.Requirement):
    def __init__(self, requirement_string):
        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
        super(Requirement, self).__init__(requirement_string)
        self.unsafe_name = self.name
        project_name = safe_name(self.name)
        self.project_name = project_name
        self.key = project_name.lower()
        self.specs = [(spec.operator, spec.version) for spec in self.specifier]
        self.extras = tuple(map(safe_extra, self.extras))
        # Tuple of everything that participates in equality/hashing.
        self.hashCmp = (
            self.key,
            self.url,
            self.specifier,
            frozenset(self.extras),
            str(self.marker) if self.marker else None,
        )
        self.__hash = hash(self.hashCmp)

    def __eq__(self, other):
        if not isinstance(other, Requirement):
            return False
        return self.hashCmp == other.hashCmp

    def __ne__(self, other):
        return not self == other

    def __contains__(self, item):
        if isinstance(item, Distribution):
            # A distribution matches only if it is the same project...
            if item.key != self.key:
                return False
            # ...and its version satisfies the specifier.
            item = item.version

        # Allow prereleases always in order to match the previous behavior of
        # this method. In the future this should be smarter and follow PEP 440
        # more accurately.
        return self.specifier.contains(item, prereleases=True)

    def __hash__(self):
        return self.__hash

    def __repr__(self):
        return "Requirement.parse(%r)" % str(self)

    @staticmethod
    def parse(s):
        (parsed_req,) = parse_requirements(s)
        return parsed_req

3217 

3218 

3219def _always_object(classes): 

3220 """ 

3221 Ensure object appears in the mro even 

3222 for old-style classes. 

3223 """ 

3224 if object not in classes: 

3225 return classes + (object,) 

3226 return classes 

3227 

3228 

def _find_adapter(registry, ob):
    """Return an adapter factory for `ob` from `registry`"""
    # Walk the MRO (with `object` guaranteed present) and return the
    # first registered adapter; None when nothing matches.
    mro = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob))))
    return next((registry[t] for t in mro if t in registry), None)

3235 

3236 

def ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    os.makedirs(os.path.dirname(path), exist_ok=True)

3241 

3242 

def _bypass_ensure_directory(path):
    """Sandbox-bypassing version of ensure_directory()"""
    if not WRITE_SUPPORT:
        raise IOError('"os.mkdir" not supported on this platform.')
    dirname, filename = split(path)
    if not (dirname and filename) or isdir(dirname):
        return
    # Create ancestors first, then this level; uses the captured `mkdir`
    # so sandboxing cannot intercept the call.
    _bypass_ensure_directory(dirname)
    try:
        mkdir(dirname, 0o755)
    except FileExistsError:
        # Lost a race with another writer; the directory now exists.
        pass

3254 

3255 

def split_sections(s):
    """Split a string or iterable thereof into (section, content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines. If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    section = None
    content = []
    for line in yield_lines(s):
        if not line.startswith("["):
            content.append(line)
            continue
        if not line.endswith("]"):
            raise ValueError("Invalid section heading", line)
        # Flush the previous segment before starting the new section.
        if section or content:
            yield section, content
        section = line[1:-1].strip()
        content = []

    # wrap up last segment
    yield section, content

3280 

3281 

3282def _mkstemp(*args, **kw): 

3283 old_open = os.open 

3284 try: 

3285 # temporarily bypass sandboxing 

3286 os.open = os_open 

3287 return tempfile.mkstemp(*args, **kw) 

3288 finally: 

3289 # and then put it back 

3290 os.open = old_open 

3291 

3292 

# Silence the PEP440Warning by default, so that end users don't get hit by it
# randomly just because they use pkg_resources. We want to append the rule
# because we want earlier uses of filterwarnings to take precedence over this
# one. (PEP440Warning is defined earlier in this module.)
warnings.filterwarnings("ignore", category=PEP440Warning, append=True)

3298 

3299 

3300# from jaraco.functools 1.3 

3301def _call_aside(f, *args, **kwargs): 

3302 f(*args, **kwargs) 

3303 return f 

3304 

3305 

@_call_aside
def _initialize(g=globals()):
    "Set up global resource manager (deliberately not state-saved)"
    manager = ResourceManager()
    g['_manager'] = manager
    # Re-export every public ResourceManager method as a module-level name.
    public_api = {
        name: getattr(manager, name)
        for name in dir(manager)
        if not name.startswith('_')
    }
    g.update(public_api)

3316 

3317 

class PkgResourcesDeprecationWarning(Warning):
    """
    Base class for warning about deprecations in ``pkg_resources``.

    This class deliberately does not derive from ``DeprecationWarning``,
    and as such is visible by default.
    """

3325 

3326 

@_call_aside
def _initialize_master_working_set():
    """
    Prepare the master working set and make the ``require()``
    API available.

    This function has explicit effects on the global state
    of pkg_resources. It is intended to be invoked once at
    the initialization of this module.

    Invocation by other packages is unsupported and done
    at their own risk.
    """
    working_set = WorkingSet._build_master()
    _declare_state('object', working_set=working_set)

    # NOTE: these local names become module-level globals via the
    # ``globals().update(locals())`` call below -- renaming any of them
    # would change the module's public API.
    require = working_set.require
    iter_entry_points = working_set.iter_entry_points
    add_activation_listener = working_set.subscribe
    run_script = working_set.run_script
    # backward compatibility
    run_main = run_script
    # Activate all distributions already on sys.path with replace=False and
    # ensure that all distributions added to the working set in the future
    # (e.g. by calling ``require()``) will get activated as well,
    # with higher priority (replace=True).
    tuple(dist.activate(replace=False) for dist in working_set)
    add_activation_listener(
        lambda dist: dist.activate(replace=True),
        existing=False,
    )
    working_set.entries = []
    # match order
    list(map(working_set.add_entry, sys.path))
    # Publish every local defined above (require, run_script, ...) as a
    # module attribute.
    globals().update(locals())