Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/pkg_resources/__init__.py: 2%

1573 statements  

« prev     ^ index     » next       coverage.py v7.2.7, created at 2023-06-07 06:35 +0000

1""" 

2Package resource API 

3-------------------- 

4 

5A resource is a logical file contained within a package, or a logical 

6subdirectory thereof. The package resource API expects resource names 

7to have their path parts separated with ``/``, *not* whatever the local 

8path separator is. Do not use os.path operations to manipulate resource 

9names being passed into the API. 

10 

11The package resource API is designed to work with normal filesystem packages, 

12.egg files, and unpacked .egg files. It can also work in a limited way with 

13.zip files and with custom PEP 302 loaders that support the ``get_data()`` 

14method. 

15 

16This module is deprecated. Users are directed to 

17`importlib.resources <https://docs.python.org/3/library/importlib.resources.html>`_ 

18and 

19`importlib.metadata <https://docs.python.org/3/library/importlib.metadata.html>`_ 

20instead. 

21""" 

22 

23import sys 

24import os 

25import io 

26import time 

27import re 

28import types 

29import zipfile 

30import zipimport 

31import warnings 

32import stat 

33import functools 

34import pkgutil 

35import operator 

36import platform 

37import collections 

38import plistlib 

39import email.parser 

40import errno 

41import tempfile 

42import textwrap 

43import inspect 

44import ntpath 

45import posixpath 

46import importlib 

47from pkgutil import get_importer 

48 

49try: 

50 import _imp 

51except ImportError: 

52 # Python 3.2 compatibility 

53 import imp as _imp 

54 

55try: 

56 FileExistsError 

57except NameError: 

58 FileExistsError = OSError 

59 

60# capture these to bypass sandboxing 

61from os import utime 

62 

63try: 

64 from os import mkdir, rename, unlink 

65 

66 WRITE_SUPPORT = True 

67except ImportError: 

68 # no write support, probably under GAE 

69 WRITE_SUPPORT = False 

70 

71from os import open as os_open 

72from os.path import isdir, split 

73 

74try: 

75 import importlib.machinery as importlib_machinery 

76 

77 # access attribute to force import under delayed import mechanisms. 

78 importlib_machinery.__name__ 

79except ImportError: 

80 importlib_machinery = None 

81 

82from pkg_resources.extern.jaraco.text import ( 

83 yield_lines, 

84 drop_comment, 

85 join_continuation, 

86) 

87 

88from pkg_resources.extern import platformdirs 

89from pkg_resources.extern import packaging 

90 

91__import__('pkg_resources.extern.packaging.version') 

92__import__('pkg_resources.extern.packaging.specifiers') 

93__import__('pkg_resources.extern.packaging.requirements') 

94__import__('pkg_resources.extern.packaging.markers') 

95__import__('pkg_resources.extern.packaging.utils') 

96 

if sys.version_info < (3, 5):
    raise RuntimeError("Python 3.5 or later is required")

# declare some globals that will be defined later to
# satisfy the linters.  They are all re-bound by the module's own
# setup code further down; ``None`` is never their real value at use time.
require = None
working_set = None
add_activation_listener = None
resources_stream = None
cleanup_resources = None
resource_dir = None
resource_stream = None
set_extraction_path = None
resource_isdir = None
resource_string = None
iter_entry_points = None
resource_listdir = None
resource_filename = None
resource_exists = None
_distribution_finders = None
_namespace_handlers = None
_namespace_packages = None


# Emitted once at import time: the whole module is deprecated in favor of
# importlib.resources / importlib.metadata (see module docstring).
warnings.warn("pkg_resources is deprecated as an API", DeprecationWarning)

122 

123 

# Best-effort prefix match used to salvage a PEP 440-ish release segment
# (optional leading "v", optional epoch "N!", then dotted digits) from an
# otherwise-invalid version string.
_PEP440_FALLBACK = re.compile(r"^v?(?P<safe>(?:[0-9]+!)?[0-9]+(?:\.[0-9]+)*)", re.I)

125 

126 

class PEP440Warning(RuntimeWarning):
    """
    Warning category emitted when a version or specifier string does not
    comply with PEP 440.
    """

132 

133 

# Backward-compatible alias: parse_version() now *is* the strict PEP 440
# Version constructor and raises on non-compliant strings.
parse_version = packaging.version.Version


# name -> state-var kind ('dict', 'object', 'none', ...); drives which
# _sget_*/_sset_* handler __getstate__/__setstate__ use for each global.
_state_vars = {}

138 

139 

def _declare_state(vartype, **kw):
    """Register module-level state variables.

    Each keyword argument becomes a module global, and its name is recorded
    in ``_state_vars`` under `vartype` so that the module-level
    ``__getstate__``/``__setstate__`` hooks know how to snapshot and
    restore it.
    """
    globals().update(kw)
    _state_vars.update({name: vartype for name in kw})

143 

144 

def __getstate__():
    """Snapshot every registered module state var via its _sget_* handler."""
    state = {}
    g = globals()
    for k, v in _state_vars.items():
        # v is the kind string ('dict', 'object', ...); dispatch by name.
        state[k] = g['_sget_' + v](g[k])
    return state

151 

152 

def __setstate__(state):
    """Restore module state vars previously captured by ``__getstate__``."""
    g = globals()
    for k, v in state.items():
        # _sset_* handlers mutate the existing global object in place.
        g['_sset_' + _state_vars[k]](k, g[k], v)
    return state

158 

159 

def _sget_dict(val):
    # State getter for dict-kind vars: snapshot as a shallow copy.
    return val.copy()

162 

163 

def _sset_dict(key, ob, state):
    # State setter for dict-kind vars: replace contents in place so that
    # existing references to the global dict stay valid.
    ob.clear()
    ob.update(state)

167 

168 

def _sget_object(val):
    # State getter for object-kind vars: delegate to the object's own
    # pickle-style state hook.
    return val.__getstate__()

171 

172 

def _sset_object(key, ob, state):
    # State setter for object-kind vars: restore in place via the object's
    # own state hook.
    ob.__setstate__(state)

175 

176 

177_sget_none = _sset_none = lambda *args: None 

178 

179 

def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of macOS that would be required to *use* extensions produced by
    distutils. But what we want when checking compatibility is to know the
    version of macOS that we are *running*. To allow usage of packages that
    explicitly require a newer version of macOS, we must also know the
    current version of the OS.

    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    """
    plat = get_build_platform()
    m = macosVersionString.match(plat)
    if m is not None and sys.platform == "darwin":
        try:
            # Rebuild the tag from the *running* OS version, keeping the
            # architecture component (group 3) from the build tag.
            plat = 'macosx-%s-%s' % ('.'.join(_macos_vers()[:2]), m.group(3))
        except ValueError:
            # not macOS (version components weren't numeric)
            pass
    return plat

202 

203 

# Public API surface of pkg_resources, grouped by purpose below.
__all__ = [
    # Basic resource access and distribution/entry point discovery
    'require',
    'run_script',
    'get_provider',
    'get_distribution',
    'load_entry_point',
    'get_entry_map',
    'get_entry_info',
    'iter_entry_points',
    'resource_string',
    'resource_stream',
    'resource_filename',
    'resource_listdir',
    'resource_exists',
    'resource_isdir',
    # Environmental control
    'declare_namespace',
    'working_set',
    'add_activation_listener',
    'find_distributions',
    'set_extraction_path',
    'cleanup_resources',
    'get_default_cache',
    # Primary implementation classes
    'Environment',
    'WorkingSet',
    'ResourceManager',
    'Distribution',
    'Requirement',
    'EntryPoint',
    # Exceptions
    'ResolutionError',
    'VersionConflict',
    'DistributionNotFound',
    'UnknownExtra',
    'ExtractionError',
    # Warnings
    'PEP440Warning',
    # Parsing functions and string utilities
    'parse_requirements',
    'parse_version',
    'safe_name',
    'safe_version',
    'get_platform',
    'compatible_platforms',
    'yield_lines',
    'split_sections',
    'safe_extra',
    'to_filename',
    'invalid_marker',
    'evaluate_marker',
    # filesystem utilities
    'ensure_directory',
    'normalize_path',
    # Distribution "precedence" constants
    'EGG_DIST',
    'BINARY_DIST',
    'SOURCE_DIST',
    'CHECKOUT_DIST',
    'DEVELOP_DIST',
    # "Provider" interfaces, implementations, and registration/lookup APIs
    'IMetadataProvider',
    'IResourceProvider',
    'FileMetadata',
    'PathMetadata',
    'EggMetadata',
    'EmptyProvider',
    'empty_provider',
    'NullProvider',
    'EggProvider',
    'DefaultProvider',
    'ZipProvider',
    'register_finder',
    'register_namespace_handler',
    'register_loader_type',
    'fixup_namespace_packages',
    'get_importer',
    # Warnings
    'PkgResourcesDeprecationWarning',
    # Deprecated/backward compatibility only
    'run_main',
    'AvailableDistributions',
]

288 

289 

class ResolutionError(Exception):
    """Abstract base for dependency resolution errors"""

    def __repr__(self):
        # e.g. ResolutionError('why',) -- class name followed by the args tuple
        return '{}{!r}'.format(type(self).__name__, self.args)

295 

296 

class VersionConflict(ResolutionError):
    """
    An already-installed version conflicts with the requested version.

    Should be initialized with the installed Distribution and the requested
    Requirement.
    """

    _template = "{self.dist} is installed but {self.req} is required"

    @property
    def dist(self):
        # installed Distribution (first positional arg)
        return self.args[0]

    @property
    def req(self):
        # requested Requirement (second positional arg)
        return self.args[1]

    def report(self):
        # locals() here is just {'self': self}, which is what the
        # "{self.dist}"-style template expects.
        return self._template.format(**locals())

    def with_context(self, required_by):
        """
        If required_by is non-empty, return a version of self that is a
        ContextualVersionConflict.
        """
        if not required_by:
            return self
        args = self.args + (required_by,)
        return ContextualVersionConflict(*args)

327 

328 

class ContextualVersionConflict(VersionConflict):
    """
    A VersionConflict that accepts a third parameter, the set of the
    requirements that required the installed Distribution.
    """

    _template = VersionConflict._template + ' by {self.required_by}'

    @property
    def required_by(self):
        # set of requirer project names (third positional arg)
        return self.args[2]

340 

341 

class DistributionNotFound(ResolutionError):
    """A requested distribution was not found"""

    _template = (
        "The '{self.req}' distribution was not found "
        "and is required by {self.requirers_str}"
    )

    @property
    def req(self):
        # the unmet Requirement (first positional arg)
        return self.args[0]

    @property
    def requirers(self):
        # iterable of requirer names, or None/empty (second positional arg)
        return self.args[1]

    @property
    def requirers_str(self):
        if not self.requirers:
            return 'the application'
        return ', '.join(self.requirers)

    def report(self):
        # locals() is just {'self': self}; the template uses "{self.*}" fields.
        return self._template.format(**locals())

    def __str__(self):
        return self.report()

369 

370 

class UnknownExtra(ResolutionError):
    """Distribution doesn't have an "extra feature" of the given name"""

    # NOTE(review): raise sites are not in this chunk; presumably raised
    # when a Requirement names an extra the Distribution lacks -- confirm.

373 

374 

# loader type/class -> factory returning an IResourceProvider for a module
_provider_factories = {}

# current interpreter's major.minor, e.g. '3.8'
PY_MAJOR = '{}.{}'.format(*sys.version_info)
# Distribution "precedence" constants (listed from most to least preferred).
EGG_DIST = 3
BINARY_DIST = 2
SOURCE_DIST = 1
CHECKOUT_DIST = 0
DEVELOP_DIST = -1

383 

384 

def register_loader_type(loader_type, provider_factory):
    """Register `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    """
    # Lookup later goes through _find_adapter, which walks the loader's MRO.
    _provider_factories[loader_type] = provider_factory

393 

394 

def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq, Requirement):
        # For a Requirement, the active Distribution itself acts as the
        # provider; activate it via require() if it isn't active yet.
        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    try:
        module = sys.modules[moduleOrReq]
    except KeyError:
        # Not imported yet -- import it so we can inspect its __loader__.
        __import__(moduleOrReq)
        module = sys.modules[moduleOrReq]
    loader = getattr(module, '__loader__', None)
    return _find_adapter(_provider_factories, loader)(module)

406 

407 

def _macos_vers(_cache=[]):
    # NOTE: the mutable default argument is deliberate -- it serves as a
    # per-process cache of the computed version components.
    if not _cache:
        version = platform.mac_ver()[0]
        # fallback for MacPorts
        if version == '':
            plist = '/System/Library/CoreServices/SystemVersion.plist'
            if os.path.exists(plist):
                # plistlib.readPlist was removed in Python 3.9; only use it
                # when present.
                if hasattr(plistlib, 'readPlist'):
                    plist_content = plistlib.readPlist(plist)
                    if 'ProductVersion' in plist_content:
                        version = plist_content['ProductVersion']

        # cached as a list of string components, e.g. ['10', '15', '7']
        _cache.append(version.split('.'))
    return _cache[0]

422 

423 

424def _macos_arch(machine): 

425 return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine) 

426 

427 

def get_build_platform():
    """Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and macOS.
    """
    from sysconfig import get_platform

    plat = get_platform()
    if sys.platform == "darwin" and not plat.startswith('macosx-'):
        try:
            version = _macos_vers()
            # uname machine field, with spaces normalized for the tag
            machine = os.uname()[4].replace(" ", "_")
            return "macosx-%d.%d-%s" % (
                int(version[0]),
                int(version[1]),
                _macos_arch(machine),
            )
        except ValueError:
            # if someone is running a non-Mac darwin system, this will fall
            # through to the default implementation
            pass
    return plat

451 

452 

# e.g. "macosx-10.9-x86_64" -> groups (major, minor, arch)
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
# legacy pre-setuptools-0.6 darwin tags, e.g. "darwin-8.11.0-...".
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
# XXX backward compat
get_platform = get_build_platform

457 

458 

def compatible_platforms(provided, required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    if provided is None or required is None or provided == required:
        # easy case
        return True

    # macOS special cases
    reqMac = macosVersionString.match(required)
    if reqMac:
        provMac = macosVersionString.match(provided)

        # is this a Mac package?
        if not provMac:
            # this is backwards compatibility for packages built before
            # setuptools 0.6. All packages built after this point will
            # use the new macOS designation.
            provDarwin = darwinVersionString.match(provided)
            if provDarwin:
                dversion = int(provDarwin.group(1))
                # Compare numerically: the former string comparison
                # ("10.10" >= "10.3" is lexicographically False) broke for
                # double-digit minor versions.
                macosversion = (int(reqMac.group(1)), int(reqMac.group(2)))
                if (
                    dversion == 7
                    and macosversion >= (10, 3)
                    or dversion == 8
                    and macosversion >= (10, 4)
                ):
                    return True
            # egg isn't macOS or legacy darwin
            return False

        # are they the same major version and machine type?
        if provMac.group(1) != reqMac.group(1) or provMac.group(3) != reqMac.group(3):
            return False

        # is the required OS major update >= the provided one?
        if int(provMac.group(2)) > int(reqMac.group(2)):
            return False

        return True

    # XXX Linux and other platforms' special cases should go here
    return False

506 

507 

def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    # Run the script inside the *caller's* global namespace, scrubbed of
    # everything except __name__, so it executes as if it were __main__.
    ns = sys._getframe(1).f_globals
    name = ns['__name__']
    ns.clear()
    ns['__name__'] = name
    require(dist_spec)[0].run_script(script_name, ns)

515 

516 

# backward compatibility: old public name kept so existing callers still work
run_main = run_script

519 

520 

def get_distribution(dist):
    """Return a current distribution object for a Requirement or string"""
    # Progressively convert: str -> Requirement -> Distribution.
    candidate = dist
    if isinstance(candidate, str):
        candidate = Requirement.parse(candidate)
    if isinstance(candidate, Requirement):
        candidate = get_provider(candidate)
    if isinstance(candidate, Distribution):
        return candidate
    raise TypeError("Expected string, Requirement, or Distribution", candidate)

530 

531 

def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    # Thin module-level convenience wrapper over Distribution.load_entry_point.
    return get_distribution(dist).load_entry_point(group, name)

535 

536 

def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    # Thin module-level convenience wrapper over Distribution.get_entry_map.
    return get_distribution(dist).get_entry_map(group)

540 

541 

def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    # Thin module-level convenience wrapper over Distribution.get_entry_info.
    return get_distribution(dist).get_entry_info(group, name)

545 

546 

class IMetadataProvider:
    # NOTE: methods here are deliberately declared without ``self``; this
    # class documents the metadata-provider interface rather than being a
    # base class to instantiate.

    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""

    def get_metadata(name):
        """The named metadata resource as a string"""

    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""

    def metadata_isdir(name):
        """Is the named metadata a directory? (like ``os.path.isdir()``)"""

    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""

568 

569 

class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources"""

    # NOTE: like IMetadataProvider, methods omit ``self``; this class is
    # interface documentation, not an instantiable base.

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Does the package contain the named resource?"""

    def resource_isdir(resource_name):
        """Is the named resource a directory? (like ``os.path.isdir()``)"""

    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""

596 

597 

class WorkingSet:
    """A collection of active distributions on sys.path (or a similar list)"""

    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        self.entries = []  # ordered path entries (mirrors sys.path semantics)
        self.entry_keys = {}  # path entry -> list of dist keys found there
        self.by_key = {}  # dist key -> active Distribution
        self.normalized_to_canonical_keys = {}  # canonicalized name -> dist key
        self.callbacks = []  # subscribers notified whenever a dist is added

        if entries is None:
            entries = sys.path

        for entry in entries:
            self.add_entry(entry)

    @classmethod
    def _build_master(cls):
        """
        Prepare the master working set.
        """
        ws = cls()
        try:
            from __main__ import __requires__
        except ImportError:
            # The main program does not list any requirements
            return ws

        # ensure the requirements are met
        try:
            ws.require(__requires__)
        except VersionConflict:
            # sys.path's dists conflict with __requires__; rebuild from scratch
            return cls._build_from_requirements(__requires__)

        return ws

    @classmethod
    def _build_from_requirements(cls, req_spec):
        """
        Build a working set from a requirement spec. Rewrites sys.path.
        """
        # try it without defaults already on sys.path
        # by starting with an empty path
        ws = cls([])
        reqs = parse_requirements(req_spec)
        dists = ws.resolve(reqs, Environment())
        for dist in dists:
            ws.add(dist)

        # add any missing entries from sys.path
        for entry in sys.path:
            if entry not in ws.entries:
                ws.add_entry(entry)

        # then copy back to sys.path
        sys.path[:] = ws.entries
        return ws

    def add_entry(self, entry):
        """Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry, True)`` is used to find distributions
        corresponding to the path entry, and they are added. `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        equal ``sys.path``.)
        """
        self.entry_keys.setdefault(entry, [])
        self.entries.append(entry)
        for dist in find_distributions(entry, True):
            # insert=False: the entry is already in self.entries
            self.add(dist, entry, False)

    def __contains__(self, dist):
        """True if `dist` is the active distribution for its project"""
        return self.by_key.get(dist.key) == dist

    def find(self, req):
        """Find a distribution matching requirement `req`

        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`. But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        """
        dist = self.by_key.get(req.key)

        if dist is None:
            # Fall back through name canonicalization (PEP 503-style),
            # rewriting req.key to the canonical key when it maps to one.
            canonical_key = self.normalized_to_canonical_keys.get(req.key)

            if canonical_key is not None:
                req.key = canonical_key
                dist = self.by_key.get(canonical_key)

        if dist is not None and dist not in req:
            # XXX add more info
            raise VersionConflict(dist, req)
        return dist

    def iter_entry_points(self, group, name=None):
        """Yield entry point objects from `group` matching `name`

        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        """
        return (
            entry
            for dist in self
            for entry in dist.get_entry_map(group).values()
            if name is None or name == entry.name
        )

    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        # Same frame-globals scrubbing as the module-level run_script():
        # execute in the caller's namespace with only __name__ preserved.
        ns = sys._getframe(1).f_globals
        name = ns['__name__']
        ns.clear()
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)

    def __iter__(self):
        """Yield distributions for non-duplicate projects in the working set

        The yield order is the order in which the items' path entries were
        added to the working set.
        """
        seen = {}
        for item in self.entries:
            if item not in self.entry_keys:
                # workaround a cache issue
                continue

            for key in self.entry_keys[item]:
                if key not in seen:
                    seen[key] = 1
                    yield self.by_key[key]

    def add(self, dist, entry=None, insert=True, replace=False):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set, unless `replace=True`.
        If it's added, any callbacks registered with the ``subscribe()`` method
        will be called.
        """
        if insert:
            dist.insert_on(self.entries, entry, replace=replace)

        if entry is None:
            entry = dist.location
        # record the dist's key under both the given entry and its location
        keys = self.entry_keys.setdefault(entry, [])
        keys2 = self.entry_keys.setdefault(dist.location, [])
        if not replace and dist.key in self.by_key:
            # ignore hidden distros
            return

        self.by_key[dist.key] = dist
        normalized_name = packaging.utils.canonicalize_name(dist.key)
        self.normalized_to_canonical_keys[normalized_name] = dist.key
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        # notify subscribers registered via subscribe()
        self._added_new(dist)

    def resolve(
        self,
        requirements,
        env=None,
        installer=None,
        replace_conflicting=False,
        extras=None,
    ):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects. `env`,
        if supplied, should be an ``Environment`` instance. If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set. `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.

        Unless `replace_conflicting=True`, raises a VersionConflict exception
        if
        any requirements are found on the path that have the correct name but
        the wrong version. Otherwise, if an `installer` is supplied it will be
        invoked to obtain the correct version of the requirement and activate
        it.

        `extras` is a list of the extras to be used with these requirements.
        This is important because extra requirements may look like `my_req;
        extra = "my_extra"`, which would otherwise be interpreted as a purely
        optional requirement. Instead, we want to be able to assert that these
        requirements are truly required.
        """

        # set up the stack (reversed so pop(0) processes in original order
        # after extensions are appended)
        requirements = list(requirements)[::-1]
        # set of processed requirements
        processed = {}
        # key -> dist
        best = {}
        to_activate = []

        req_extras = _ReqExtras()

        # Mapping of requirement to set of distributions that required it;
        # useful for reporting info about conflicts.
        required_by = collections.defaultdict(set)

        while requirements:
            # process dependencies breadth-first
            req = requirements.pop(0)
            if req in processed:
                # Ignore cyclic or redundant dependencies
                continue

            if not req_extras.markers_pass(req, extras):
                # environment marker failed for every extra that demanded it
                continue

            dist = self._resolve_dist(
                req, best, replace_conflicting, env, installer, required_by, to_activate
            )

            # push the new requirements onto the stack
            new_requirements = dist.requires(req.extras)[::-1]
            requirements.extend(new_requirements)

            # Register the new requirements needed by req
            for new_requirement in new_requirements:
                required_by[new_requirement].add(req.project_name)
                req_extras[new_requirement] = req.extras

            processed[req] = True

        # return list of distros to activate
        return to_activate

    def _resolve_dist(
        self, req, best, replace_conflicting, env, installer, required_by, to_activate
    ):
        """Resolve one requirement to a Distribution, recording it in `best`
        and `to_activate`; raises DistributionNotFound or VersionConflict."""
        dist = best.get(req.key)
        if dist is None:
            # Find the best distribution and add it to the map
            dist = self.by_key.get(req.key)
            if dist is None or (dist not in req and replace_conflicting):
                ws = self
                if env is None:
                    if dist is None:
                        env = Environment(self.entries)
                    else:
                        # Use an empty environment and workingset to avoid
                        # any further conflicts with the conflicting
                        # distribution
                        env = Environment([])
                        ws = WorkingSet([])
                dist = best[req.key] = env.best_match(
                    req, ws, installer, replace_conflicting=replace_conflicting
                )
                if dist is None:
                    requirers = required_by.get(req, None)
                    raise DistributionNotFound(req, requirers)
            to_activate.append(dist)
        if dist not in req:
            # Oops, the "best" so far conflicts with a dependency
            dependent_req = required_by[req]
            raise VersionConflict(dist, req).with_context(dependent_req)
        return dist

    def find_plugins(self, plugin_env, full_env=None, installer=None, fallback=True):
        """Find all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            # add plugins+libs to sys.path
            map(working_set.add, distributions)
            # display errors
            print('Could not load', errors)

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        contains all currently-available distributions. If `full_env` is not
        supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies. `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        """

        plugin_projects = list(plugin_env)
        # scan project names in alphabetic order
        plugin_projects.sort()

        error_info = {}
        distributions = {}

        if full_env is None:
            env = Environment(self.entries)
            env += plugin_env
        else:
            env = full_env + plugin_env

        # shadow_set trial-resolves plugins without mutating self
        shadow_set = self.__class__([])
        # put all our entries in shadow_set
        list(map(shadow_set.add, self))

        for project_name in plugin_projects:
            # plugin_env[project_name] yields versions newest-first
            for dist in plugin_env[project_name]:
                req = [dist.as_requirement()]

                try:
                    resolvees = shadow_set.resolve(req, env, installer)

                except ResolutionError as v:
                    # save error info
                    error_info[dist] = v
                    if fallback:
                        # try the next older version of project
                        continue
                    else:
                        # give up on this project, keep going
                        break

                else:
                    list(map(shadow_set.add, resolvees))
                    distributions.update(dict.fromkeys(resolvees))

                    # success, no need to try any more versions of this project
                    break

        distributions = list(distributions)
        distributions.sort()

        return distributions, error_info

    def require(self, *requirements):
        """Ensure that distributions matching `requirements` are activated

        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required. The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        """
        needed = self.resolve(parse_requirements(requirements))

        for dist in needed:
            self.add(dist)

        return needed

    def subscribe(self, callback, existing=True):
        """Invoke `callback` for all distributions

        If `existing=True` (default),
        call on all existing ones, as well.
        """
        if callback in self.callbacks:
            return
        self.callbacks.append(callback)
        if not existing:
            return
        for dist in self:
            callback(dist)

    def _added_new(self, dist):
        # fan out a newly added dist to every registered callback
        for callback in self.callbacks:
            callback(dist)

    def __getstate__(self):
        # snapshot all mutable state by (shallow) copy
        return (
            self.entries[:],
            self.entry_keys.copy(),
            self.by_key.copy(),
            self.normalized_to_canonical_keys.copy(),
            self.callbacks[:],
        )

    def __setstate__(self, e_k_b_n_c):
        entries, keys, by_key, normalized_to_canonical_keys, callbacks = e_k_b_n_c
        self.entries = entries[:]
        self.entry_keys = keys.copy()
        self.by_key = by_key.copy()
        self.normalized_to_canonical_keys = normalized_to_canonical_keys.copy()
        self.callbacks = callbacks[:]

1007 

1008 

class _ReqExtras(dict):
    """
    Map each requirement to the extras that demanded it.
    """

    def markers_pass(self, req, extras=None):
        """
        Evaluate markers for req against each extra that demanded it.

        Return False if the req has a marker and fails evaluation.
        Otherwise, return True.
        """
        if not req.marker:
            # No marker means the requirement is unconditional.
            return True
        # Evaluate against every demanding extra, plus any explicitly
        # supplied extras (or a bare None environment).
        candidate_extras = self.get(req, ()) + (extras or (None,))
        return any(
            req.marker.evaluate({'extra': extra}) for extra in candidate_extras
        )

1027 

1028 

class Environment:
    """Searchable snapshot of distributions on a search path"""

    def __init__(
        self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR
    ):
        """Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items. If not
        supplied, ``sys.path`` is used.

        `platform` is an optional string specifying the name of the platform
        that platform-specific distributions must be compatible with. If
        unspecified, it defaults to the current platform. `python` is an
        optional string naming the desired version of Python (e.g. ``'3.6'``);
        it defaults to the current version.

        You may explicitly set `platform` (and/or `python`) to ``None`` if you
        wish to map *all* distributions, not just those compatible with the
        running platform or Python version.
        """
        # NOTE: the `platform` default is computed once, when the class body
        # executes, not on every call.
        # Maps lowercased project key -> list of Distributions, newest first.
        self._distmap = {}
        self.platform = platform
        self.python = python
        self.scan(search_path)

    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?

        The distribution must match the platform and python version
        requirements specified when this environment was created, or False
        is returned.
        """
        # A dist with no recorded py_version is accepted for any python.
        py_compat = (
            self.python is None
            or dist.py_version is None
            or dist.py_version == self.python
        )
        return py_compat and compatible_platforms(dist.platform, self.platform)

    def remove(self, dist):
        """Remove `dist` from the environment"""
        # Raises KeyError/ValueError if `dist` was never added.
        self._distmap[dist.key].remove(dist)

    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items. If not
        supplied, ``sys.path`` is used. Only distributions conforming to
        the platform/python version defined at initialization are added.
        """
        if search_path is None:
            search_path = sys.path

        for item in search_path:
            for dist in find_distributions(item):
                self.add(dist)

    def __getitem__(self, project_name):
        """Return a newest-to-oldest list of distributions for `project_name`

        Uses case-insensitive `project_name` comparison, assuming all the
        project's distributions use their project's name converted to all
        lowercase as their key.

        """
        distribution_key = project_name.lower()
        return self._distmap.get(distribution_key, [])

    def add(self, dist):
        """Add `dist` if we ``can_add()`` it and it has not already been added"""
        if self.can_add(dist) and dist.has_version():
            dists = self._distmap.setdefault(dist.key, [])
            if dist not in dists:
                dists.append(dist)
                # Keep newest-first ordering for __getitem__ / best_match.
                dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)

    def best_match(self, req, working_set, installer=None, replace_conflicting=False):
        """Find distribution best matching `req` and usable on `working_set`

        This calls the ``find(req)`` method of the `working_set` to see if a
        suitable distribution is already active. (This may raise
        ``VersionConflict`` if an unsuitable version of the project is already
        active in the specified `working_set`.) If a suitable distribution
        isn't active, this method returns the newest distribution in the
        environment that meets the ``Requirement`` in `req`. If no suitable
        distribution is found, and `installer` is supplied, then the result of
        calling the environment's ``obtain(req, installer)`` method will be
        returned.
        """
        try:
            dist = working_set.find(req)
        except VersionConflict:
            # With replace_conflicting, ignore the active-but-unsuitable
            # dist and fall through to the environment search below.
            if not replace_conflicting:
                raise
            dist = None
        if dist is not None:
            return dist
        # Environment lists are newest-first, so the first match wins.
        for dist in self[req.key]:
            if dist in req:
                return dist
        # try to download/install
        return self.obtain(req, installer)

    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)

        Obtain a distro that matches requirement (e.g. via download). In the
        base ``Environment`` class, this routine just returns
        ``installer(requirement)``, unless `installer` is None, in which case
        None is returned instead. This method is a hook that allows subclasses
        to attempt other ways of obtaining a distribution before falling back
        to the `installer` argument."""
        if installer is not None:
            return installer(requirement)

    def __iter__(self):
        """Yield the unique project names of the available distributions"""
        for key in self._distmap.keys():
            # Skip keys whose distribution lists have been emptied by remove().
            if self[key]:
                yield key

    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        if isinstance(other, Distribution):
            self.add(other)
        elif isinstance(other, Environment):
            for project in other:
                for dist in other[project]:
                    self.add(dist)
        else:
            raise TypeError("Can't add %r to environment" % (other,))
        return self

    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        # Start from an unrestricted environment (no platform/python filter)
        # so nothing from either operand is rejected by can_add().
        new = self.__class__([], platform=None, python=None)
        for env in self, other:
            new += env
        return new

1171 

1172 

1173# XXX backward compatibility 

1174AvailableDistributions = Environment 

1175 

1176 

class ExtractionError(RuntimeError):
    """Raised when a resource cannot be extracted to the egg cache.

    Instances carry three extra attributes:

    manager
        The resource manager that raised this exception.

    cache_path
        The base directory resources were being extracted into.

    original_error
        The underlying exception that caused extraction to fail.
    """

1191 

1192 

class ResourceManager:
    """Manage resource extraction and packages"""

    # Base directory for extraction; None means "use get_default_cache()".
    extraction_path = None

    def __init__(self):
        # Every path handed out by get_cache_path(), recorded so that
        # cleanup_resources() could know what was extracted.
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        return get_provider(package_or_requirement).resource_isdir(resource_name)

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        return get_provider(package_or_requirement).get_resource_filename(
            self, resource_name
        )

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        return get_provider(package_or_requirement).get_resource_stream(
            self, resource_name
        )

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        return get_provider(package_or_requirement).get_resource_string(
            self, resource_name
        )

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        return get_provider(package_or_requirement).resource_listdir(resource_name)

    def extraction_error(self):
        """Give an error message for problems extracting file(s)"""

        # The exception currently being handled, if any; embedded in the
        # user-facing message below.
        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()

        tmpl = textwrap.dedent(
            """
            Can't extract file(s) to egg cache

            The following error occurred while trying to extract file(s)
            to the Python egg cache:

              {old_exc}

            The Python egg cache directory is currently set to:

              {cache_path}

            Perhaps your account does not have write access to this directory?
            You can change the cache directory by setting the PYTHON_EGG_CACHE
            environment variable to point to an accessible directory.
            """
        ).lstrip()
        err = ExtractionError(tmpl.format(**locals()))
        err.manager = self
        err.cache_path = cache_path
        err.original_error = old_exc
        raise err

    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist. `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension. `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        """
        extract_path = self.extraction_path or get_default_cache()
        target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
        try:
            _bypass_ensure_directory(target_path)
        except Exception:
            # Translate any failure into a user-friendly ExtractionError.
            self.extraction_error()

        self._warn_unsafe_extraction_path(extract_path)

        # Record the path for potential cleanup_resources() later.
        self.cached_files[target_path] = 1
        return target_path

    @staticmethod
    def _warn_unsafe_extraction_path(path):
        """
        If the default extraction path is overridden and set to an insecure
        location, such as /tmp, it opens up an opportunity for an attacker to
        replace an extracted file with an unauthorized payload. Warn the user
        if a known insecure location is used.

        See Distribute #375 for more details.
        """
        if os.name == 'nt' and not path.startswith(os.environ['windir']):
            # On Windows, permissions are generally restrictive by default
            # and temp directories are not writable by other users, so
            # bypass the warning.
            return
        mode = os.stat(path).st_mode
        # Writable by group or others -> vulnerable to payload substitution.
        if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
            msg = (
                "Extraction path is writable by group/others "
                "and vulnerable to attack when "
                "used with get_resource_filename ({path}). "
                "Consider a more secure "
                "location (set with .set_extraction_path or the "
                "PYTHON_EGG_CACHE environment variable)."
            ).format(**locals())
            warnings.warn(msg, UserWarning)

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource. They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """

        if os.name == 'posix':
            # Make the resource executable
            mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
            os.chmod(tempname, mode)

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``. (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks. See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``. You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done. There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        """
        if self.cached_files:
            raise ValueError("Can't change extraction path, files already extracted")

        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process. This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # XXX intentionally unimplemented upstream; currently returns None.

1369 

1370 

def get_default_cache():
    """
    Return the ``PYTHON_EGG_CACHE`` environment variable
    or a platform-relevant user cache dir for an app
    named "Python-Eggs".
    """
    # An explicit (non-empty) environment setting always wins.
    cache = os.environ.get('PYTHON_EGG_CACHE')
    if cache:
        return cache
    return platformdirs.user_cache_dir(appname='Python-Eggs')

1380 

1381 

def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    # Each maximal run of characters outside [A-Za-z0-9.] collapses to '-'.
    disallowed_run = re.compile(r'[^A-Za-z0-9.]+')
    return disallowed_run.sub('-', name)

1388 

1389 

def safe_version(version):
    """
    Convert an arbitrary string to a standard version string
    """
    try:
        # Prefer the canonical PEP 440 normalization when possible.
        return str(packaging.version.Version(version))
    except packaging.version.InvalidVersion:
        # Fall back to a lossy sanitization for legacy version strings.
        sanitized = version.replace(' ', '.')
        return re.sub('[^A-Za-z0-9.]+', '-', sanitized)

1400 

1401 

def _forgiving_version(version):
    """Fallback when ``safe_version`` is not safe enough
    >>> parse_version(_forgiving_version('0.23ubuntu1'))
    <Version('0.23.dev0+sanitized.ubuntu1')>
    >>> parse_version(_forgiving_version('0.23-'))
    <Version('0.23.dev0+sanitized')>
    >>> parse_version(_forgiving_version('0.-_'))
    <Version('0.dev0+sanitized')>
    >>> parse_version(_forgiving_version('42.+?1'))
    <Version('42.dev0+sanitized.1')>
    >>> parse_version(_forgiving_version('hello world'))
    <Version('0.dev0+sanitized.hello.world')>
    """
    normalized = version.replace(' ', '.')
    match = _PEP440_FALLBACK.search(normalized)
    # Default: nothing salvageable, keep the whole string as local data.
    safe, rest = "0", normalized
    if match:
        safe = match["safe"]
        rest = normalized[len(safe):]
    # Everything unparseable is preserved in a "+sanitized." local segment.
    local = "sanitized.{}".format(_safe_segment(rest)).strip(".")
    return "{}.dev0+{}".format(safe, local)

1425 

1426 

def _safe_segment(segment):
    """Convert an arbitrary string into a safe segment"""
    # Three passes: collapse forbidden runs to '-', scrub junk that
    # follows a dash, scrub junk that follows a dot, then trim the ends.
    cleaned = re.sub('[^A-Za-z0-9.]+', '-', segment)
    cleaned = re.sub('-[^A-Za-z0-9]+', '-', cleaned)
    cleaned = re.sub(r'\.[^A-Za-z0-9]+', '.', cleaned)
    return cleaned.strip(".-")

1432 

1433 

def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    """
    # '.' and '-' are permitted; everything else collapses to '_'.
    normalized = re.sub('[^A-Za-z0-9.-]+', '_', extra)
    return normalized.lower()

1441 

1442 

def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return '_'.join(name.split('-'))

1449 

1450 

def invalid_marker(text):
    """
    Validate text as a PEP 508 environment marker; return an exception
    if invalid or False otherwise.
    """
    try:
        evaluate_marker(text)
    except SyntaxError as e:
        # Scrub location info that is meaningless for a marker string.
        e.filename = None
        e.lineno = None
        return e
    else:
        return False

1463 

1464 

def evaluate_marker(text, extra=None):
    """
    Evaluate a PEP 508 environment marker.
    Return a boolean indicating the marker result in this environment.
    Raise SyntaxError if marker is invalid.

    This implementation uses the 'pyparsing' module.
    """
    try:
        return packaging.markers.Marker(text).evaluate()
    except packaging.markers.InvalidMarker as e:
        # Present marker syntax problems uniformly as SyntaxError.
        raise SyntaxError(e) from e

1478 

1479 

class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

    # Populated by EggProvider subclasses; None means "not an egg".
    egg_name = None
    egg_info = None
    loader = None

    def __init__(self, module):
        # The module's PEP 302 loader (may be absent, e.g. namespace pkgs).
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))

    def get_resource_filename(self, manager, resource_name):
        # Resolve the '/'-separated resource name under module_path.
        return self._fn(self.module_path, resource_name)

    def get_resource_stream(self, manager, resource_name):
        # Wrap the raw bytes in an in-memory binary stream.
        return io.BytesIO(self.get_resource_string(manager, resource_name))

    def get_resource_string(self, manager, resource_name):
        # NOTE: despite the name, this returns bytes (from _get), not str.
        return self._get(self._fn(self.module_path, resource_name))

    def has_resource(self, resource_name):
        return self._has(self._fn(self.module_path, resource_name))

    def _get_metadata_path(self, name):
        return self._fn(self.egg_info, name)

    def has_metadata(self, name):
        if not self.egg_info:
            # Return the falsy egg_info itself (None or ''), preserving the
            # historical return type rather than coercing to bool.
            return self.egg_info

        path = self._get_metadata_path(name)
        return self._has(path)

    def get_metadata(self, name):
        if not self.egg_info:
            return ""
        path = self._get_metadata_path(name)
        value = self._get(path)
        try:
            return value.decode('utf-8')
        except UnicodeDecodeError as exc:
            # Include the path in the error message to simplify
            # troubleshooting, and without changing the exception type.
            exc.reason += ' in {} file at path: {}'.format(name, path)
            raise

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def resource_isdir(self, resource_name):
        return self._isdir(self._fn(self.module_path, resource_name))

    def metadata_isdir(self, name):
        return self.egg_info and self._isdir(self._fn(self.egg_info, name))

    def resource_listdir(self, resource_name):
        return self._listdir(self._fn(self.module_path, resource_name))

    def metadata_listdir(self, name):
        if self.egg_info:
            return self._listdir(self._fn(self.egg_info, name))
        return []

    def run_script(self, script_name, namespace):
        # Scripts live in metadata under 'scripts/<name>'.
        script = 'scripts/' + script_name
        if not self.has_metadata(script):
            raise ResolutionError(
                "Script {script!r} not found in metadata at {self.egg_info!r}".format(
                    **locals()
                ),
            )
        # Normalize all line endings to '\n' before compiling.
        script_text = self.get_metadata(script).replace('\r\n', '\n')
        script_text = script_text.replace('\r', '\n')
        script_filename = self._fn(self.egg_info, script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            # Real file on disk: compile from the file contents directly.
            with open(script_filename) as fid:
                source = fid.read()
            code = compile(source, script_filename, 'exec')
            exec(code, namespace, namespace)
        else:
            from linecache import cache

            # Seed linecache so tracebacks can display the script source
            # even though no real file exists at script_filename.
            cache[script_filename] = (
                len(script_text),
                0,
                script_text.split('\n'),
                script_filename,
            )
            script_code = compile(script_text, script_filename, 'exec')
            exec(script_code, namespace, namespace)

    def _has(self, path):
        # Subclasses registered per loader type must override these hooks.
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _isdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _listdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _fn(self, base, resource_name):
        # Reject '..' and absolute paths before touching the filesystem.
        self._validate_resource_path(resource_name)
        if resource_name:
            return os.path.join(base, *resource_name.split('/'))
        return base

    @staticmethod
    def _validate_resource_path(path):
        """
        Validate the resource paths according to the docs.
        https://setuptools.pypa.io/en/latest/pkg_resources.html#basic-resource-access

        >>> warned = getfixture('recwarn')
        >>> warnings.simplefilter('always')
        >>> vrp = NullProvider._validate_resource_path
        >>> vrp('foo/bar.txt')
        >>> bool(warned)
        False
        >>> vrp('../foo/bar.txt')
        >>> bool(warned)
        True
        >>> warned.clear()
        >>> vrp('/foo/bar.txt')
        >>> bool(warned)
        True
        >>> vrp('foo/../../bar.txt')
        >>> bool(warned)
        True
        >>> warned.clear()
        >>> vrp('foo/f../bar.txt')
        >>> bool(warned)
        False

        Windows path separators are straight-up disallowed.
        >>> vrp(r'\\foo/bar.txt')
        Traceback (most recent call last):
        ...
        ValueError: Use of .. or absolute path in a resource path \
is not allowed.

        >>> vrp(r'C:\\foo/bar.txt')
        Traceback (most recent call last):
        ...
        ValueError: Use of .. or absolute path in a resource path \
is not allowed.

        Blank values are allowed

        >>> vrp('')
        >>> bool(warned)
        False

        Non-string values are not.

        >>> vrp(None)
        Traceback (most recent call last):
        ...
        AttributeError: ...
        """
        invalid = (
            os.path.pardir in path.split(posixpath.sep)
            or posixpath.isabs(path)
            or ntpath.isabs(path)
        )
        if not invalid:
            return

        msg = "Use of .. or absolute path in a resource path is not allowed."

        # Aggressively disallow Windows absolute paths
        if ntpath.isabs(path) and not posixpath.isabs(path):
            raise ValueError(msg)

        # for compatibility, warn; in future
        # raise ValueError(msg)
        warnings.warn(
            msg[:-1] + " and will raise exceptions in a future release.",
            DeprecationWarning,
            stacklevel=4,
        )

    def _get(self, path):
        # Delegate to the loader's optional PEP 302 get_data() extension.
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
        )

1674 

1675 

1676register_loader_type(object, NullProvider) 

1677 

1678 

def _parents(path):
    """
    yield all parents of path including path
    """
    # os.path.split() eventually reaches a fixed point (root or ''),
    # which terminates the walk.
    previous = None
    while path != previous:
        yield path
        previous = path
        path, _ = os.path.split(path)

1688 

1689 

class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self, module):
        super().__init__(module)
        self._setup_prefix()

    def _setup_prefix(self):
        # Metadata may be nested inside a "basket" of multiple eggs, so
        # walk upward from module_path rather than relying on .archive.
        candidates = (p for p in _parents(self.module_path) if _is_egg_path(p))
        egg = next(candidates, None)
        if egg:
            self._set_egg(egg)

    def _set_egg(self, path):
        # Cache the egg's name, metadata directory, and root location.
        self.egg_name = os.path.basename(path)
        self.egg_info = os.path.join(path, 'EGG-INFO')
        self.egg_root = path

1708 

1709 

class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self, path):
        return os.path.isdir(path)

    def _listdir(self, path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        # Stream directly from disk instead of buffering into BytesIO.
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _get(self, path):
        with open(path, 'rb') as stream:
            return stream.read()

    @classmethod
    def _register(cls):
        # Register for both source and bytecode-only file loaders; absent
        # loader classes fall back to type(None), a harmless no-op key.
        for loader_name in ('SourceFileLoader', 'SourcelessFileLoader'):
            loader_cls = getattr(importlib_machinery, loader_name, type(None))
            register_loader_type(loader_cls, cls)

1738 

1739 

1740DefaultProvider._register() 

1741 

1742 

class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    module_path = None

    def __init__(self):
        # Deliberately skip NullProvider.__init__: there is no module.
        pass

    def _has(self, path):
        return False

    def _isdir(self, path):
        return False

    def _get(self, path):
        return ''

    def _listdir(self, path):
        return []

1758 

1759 

1760empty_provider = EmptyProvider() 

1761 

1762 

class ZipManifests(dict):
    """
    zip manifest builder
    """

    @classmethod
    def build(cls, path):
        """
        Build a dictionary similar to the zipimport directory
        caches, except instead of tuples, store ZipInfo objects.

        Use a platform-specific path separator (os.sep) for the path keys
        for compatibility with pypy on Windows.
        """
        with zipfile.ZipFile(path) as zfile:
            return {
                name.replace('/', os.sep): zfile.getinfo(name)
                for name in zfile.namelist()
            }

    # Plain builds are not cached; see MemoizedZipManifests for caching.
    load = build

1788 

1789 

class MemoizedZipManifests(ZipManifests):
    """
    Memoized zipfile manifests.
    """

    # Cache entry pairing a manifest with the archive mtime it was built at.
    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')

    def load(self, path):
        """
        Load a manifest at path or return a suitable manifest already loaded.
        """
        path = os.path.normpath(path)
        mtime = os.stat(path).st_mtime

        # Rebuild when the path is unseen or the archive changed on disk.
        stale = path not in self or self[path].mtime != mtime
        if stale:
            self[path] = self.manifest_mod(self.build(path), mtime)

        return self[path].manifest

1809 

1810 

class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""

    # Lazily built list of eager resources; see _get_eager_resources().
    eagers = None
    # Class-wide memoized manifest cache keyed by archive path.
    _zip_manifests = MemoizedZipManifests()

    def __init__(self, module):
        super().__init__(module)
        # Prefix every virtual path inside this archive starts with.
        self.zip_pre = self.loader.archive + os.sep

    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        fspath = fspath.rstrip(os.sep)
        if fspath == self.loader.archive:
            return ''
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre) :]
        raise AssertionError("%s is not a subpath of %s" % (fspath, self.zip_pre))

    def _parts(self, zip_path):
        # Convert a zipfile subpath into an egg-relative path part list.
        # pseudo-fs path
        fspath = self.zip_pre + zip_path
        if fspath.startswith(self.egg_root + os.sep):
            return fspath[len(self.egg_root) + 1 :].split(os.sep)
        raise AssertionError("%s is not a subpath of %s" % (fspath, self.egg_root))

    @property
    def zipinfo(self):
        # Manifest for this archive: zip subpath -> ZipInfo (mtime-memoized).
        return self._zip_manifests.load(self.loader.archive)

    def get_resource_filename(self, manager, resource_name):
        if not self.egg_name:
            raise NotImplementedError(
                "resource_filename() only supported for .egg, not .zip"
            )
        # no need to lock for extraction, since we use temp names
        zip_path = self._resource_to_zip(resource_name)
        eagers = self._get_eager_resources()
        if '/'.join(self._parts(zip_path)) in eagers:
            # Requesting one eager resource forces extraction of them all.
            for name in eagers:
                self._extract_resource(manager, self._eager_to_zip(name))
        return self._extract_resource(manager, zip_path)

    @staticmethod
    def _get_date_and_size(zip_stat):
        size = zip_stat.file_size
        # ymdhms+wday, yday, dst
        date_time = zip_stat.date_time + (0, 0, -1)
        # 1980 offset already done
        timestamp = time.mktime(date_time)
        return timestamp, size

    # FIXME: 'ZipProvider._extract_resource' is too complex (12)
    def _extract_resource(self, manager, zip_path):  # noqa: C901
        if zip_path in self._index():
            # zip_path names a directory: extract every child recursively.
            for name in self._index()[zip_path]:
                last = self._extract_resource(manager, os.path.join(zip_path, name))
            # return the extracted directory name
            return os.path.dirname(last)

        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])

        if not WRITE_SUPPORT:
            raise IOError(
                '"os.rename" and "os.unlink" are not supported ' 'on this platform'
            )
        try:
            real_path = manager.get_cache_path(self.egg_name, self._parts(zip_path))

            if self._is_current(real_path, zip_path):
                # Already extracted and byte-identical; reuse it.
                return real_path

            # Write to a unique temp name, then rename into place, so
            # concurrent extractors never observe a partial file.
            outf, tmpnam = _mkstemp(
                ".$extract",
                dir=os.path.dirname(real_path),
            )
            os.write(outf, self.loader.get_data(zip_path))
            os.close(outf)
            utime(tmpnam, (timestamp, timestamp))
            manager.postprocess(tmpnam, real_path)

            try:
                rename(tmpnam, real_path)

            except os.error:
                if os.path.isfile(real_path):
                    if self._is_current(real_path, zip_path):
                        # the file became current since it was checked above,
                        # so proceed.
                        return real_path
                    # Windows, del old file and retry
                    elif os.name == 'nt':
                        unlink(real_path)
                        rename(tmpnam, real_path)
                        return real_path
                raise

        except os.error:
            # report a user-friendly error
            manager.extraction_error()

        return real_path

    def _is_current(self, file_path, zip_path):
        """
        Return True if the file_path is current for this zip_path
        """
        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
        if not os.path.isfile(file_path):
            return False
        stat = os.stat(file_path)
        if stat.st_size != size or stat.st_mtime != timestamp:
            return False
        # check that the contents match
        zip_contents = self.loader.get_data(zip_path)
        with open(file_path, 'rb') as f:
            file_contents = f.read()
        return zip_contents == file_contents

    def _get_eager_resources(self):
        # Eager resources are listed in two optional metadata files; the
        # union is computed once and cached on the instance.
        if self.eagers is None:
            eagers = []
            for name in ('native_libs.txt', 'eager_resources.txt'):
                if self.has_metadata(name):
                    eagers.extend(self.get_metadata_lines(name))
            self.eagers = eagers
        return self.eagers

    def _index(self):
        try:
            return self._dirindex
        except AttributeError:
            # Build a directory index: parent subpath -> list of child names.
            ind = {}
            for path in self.zipinfo:
                parts = path.split(os.sep)
                while parts:
                    parent = os.sep.join(parts[:-1])
                    if parent in ind:
                        ind[parent].append(parts[-1])
                        # Remaining ancestors are already indexed; stop early.
                        break
                    else:
                        ind[parent] = [parts.pop()]
            self._dirindex = ind
            return ind

    def _has(self, fspath):
        # A path exists if it is a file in the manifest or an indexed dir.
        zip_path = self._zipinfo_name(fspath)
        return zip_path in self.zipinfo or zip_path in self._index()

    def _isdir(self, fspath):
        return self._zipinfo_name(fspath) in self._index()

    def _listdir(self, fspath):
        return list(self._index().get(self._zipinfo_name(fspath), ()))

    def _eager_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.egg_root, resource_name))

    def _resource_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.module_path, resource_name))

1973 

1974 

1975register_loader_type(zipimport.zipimporter, ZipProvider) 

1976 

1977 

class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    """

    def __init__(self, path):
        # Path of the single backing PKG-INFO file.
        self.path = path

    def _get_metadata_path(self, name):
        # Every metadata name maps onto the one backing file.
        return self.path

    def has_metadata(self, name):
        return name == 'PKG-INFO' and os.path.isfile(self.path)

    def get_metadata(self, name):
        """Return the decoded PKG-INFO contents; raise KeyError otherwise."""
        if name != 'PKG-INFO':
            raise KeyError("No metadata except PKG-INFO is available")

        # Decode leniently; a warning is issued below if bytes were replaced.
        with io.open(self.path, encoding='utf-8', errors="replace") as f:
            metadata = f.read()
        self._warn_on_replacement(metadata)
        return metadata

    def _warn_on_replacement(self, metadata):
        # U+FFFD only appears when errors="replace" had to substitute bytes.
        replacement_char = '�'
        if replacement_char in metadata:
            # f-string instead of the fragile `tmpl.format(**locals())` idiom.
            msg = f"{self.path} could not be properly decoded in UTF-8"
            warnings.warn(msg)

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

2017 

2018 

class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(base_dir, project_name=dist_name, metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        # path: directory the importable modules live in.
        # egg_info: the .egg-info / EGG-INFO directory holding the metadata.
        self.module_path = path
        self.egg_info = egg_info

2042 

2043 

class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""

        # zip_pre is the archive path plus separator; zip-internal names are
        # derived by stripping this prefix from filesystem paths.
        self.zip_pre = importer.archive + os.sep
        self.loader = importer
        if importer.prefix:
            # Importer rooted at a subdirectory inside the archive.
            self.module_path = os.path.join(importer.archive, importer.prefix)
        else:
            self.module_path = importer.archive
        self._setup_prefix()

2057 

2058 

2059_declare_state('dict', _distribution_finders={}) 

2060 

2061 

def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `distribution_finder` is a callable that, passed a path
    item and the importer instance, yields ``Distribution`` instances found on
    that path item. See ``pkg_resources.find_on_path`` for an example."""
    # Later registrations for the same importer type win.
    _distribution_finders[importer_type] = distribution_finder

2070 

2071 

def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    importer = get_importer(path_item)
    # Dispatch to the finder registered for this importer's type (falling
    # back through its MRO via _find_adapter).
    finder = _find_adapter(_distribution_finders, importer)
    return finder(importer, path_item, only)

2077 

2078 

def find_eggs_in_zip(importer, path_item, only=False):
    """
    Find eggs in zip files; possibly multiple nested eggs.
    """
    if importer.archive.endswith('.whl'):
        # wheels are not supported with this finder
        # they don't have PKG-INFO metadata, and won't ever contain eggs
        return
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        # The archive itself is an egg distribution.
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        # don't yield nested distros
        return
    # Scan the archive root for nested eggs and metadata directories.
    for subitem in metadata.resource_listdir(''):
        if _is_egg_path(subitem):
            subpath = os.path.join(path_item, subitem)
            dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath)
            for dist in dists:
                yield dist
        elif subitem.lower().endswith(('.dist-info', '.egg-info')):
            subpath = os.path.join(path_item, subitem)
            submeta = EggMetadata(zipimport.zipimporter(subpath))
            submeta.egg_info = subpath
            yield Distribution.from_location(path_item, subitem, submeta)

2104 

2105 

2106register_finder(zipimport.zipimporter, find_eggs_in_zip) 

2107 

2108 

def find_nothing(importer, path_item, only=False):
    """Fallback finder: unrecognized importer types yield no distributions."""
    return ()

2111 

2112 

2113register_finder(object, find_nothing) 

2114 

2115 

def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    path_item = _normalize_cached(path_item)

    if _is_unpacked_egg(path_item):
        # The path item itself is an unpacked egg; yield it and stop.
        yield Distribution.from_filename(
            path_item,
            metadata=PathMetadata(path_item, os.path.join(path_item, 'EGG-INFO')),
        )
        return

    entries = (os.path.join(path_item, child) for child in safe_listdir(path_item))

    # scan for .egg and .egg-info in directory
    for entry in sorted(entries):
        # NOTE(review): `entry` was already joined with path_item above, so
        # this second join is a no-op for absolute entries -- presumably
        # harmless redundancy; confirm before changing.
        fullpath = os.path.join(path_item, entry)
        factory = dist_factory(path_item, entry, only)
        for dist in factory(fullpath):
            yield dist

2135 

2136 

def dist_factory(path_item, entry, only):
    """Return a dist_factory for the given entry."""
    lower = entry.lower()
    # *.dist-info counts only when it is an actual directory; *.egg-info
    # may be either a directory or a file.
    is_egg_info = lower.endswith('.egg-info')
    is_dist_info = lower.endswith('.dist-info') and os.path.isdir(
        os.path.join(path_item, entry)
    )
    if is_egg_info or is_dist_info:
        return distributions_from_metadata
    if not only and _is_egg_path(entry):
        return find_distributions
    if not only and lower.endswith('.egg-link'):
        return resolve_egg_link
    return NoDists()

2154 

2155 

class NoDists:
    """
    >>> bool(NoDists())
    False

    >>> list(NoDists()('anything'))
    []
    """

    def __bool__(self):
        # Falsy, so callers can cheaply test whether a factory was found.
        return False

    def __call__(self, fullpath):
        # Produce an empty iterator regardless of the path.
        return iter([])

2170 

2171 

def safe_listdir(path):
    """
    Attempt to list contents of path, but suppress some exceptions.
    """
    ignorable_errnos = (errno.ENOTDIR, errno.EACCES, errno.ENOENT)
    try:
        return os.listdir(path)
    except (PermissionError, NotADirectoryError):
        # Always ignorable, whatever errno they carry.
        pass
    except OSError as exc:
        # Ignore missing dir / not-a-dir / permission denied; re-raise
        # anything else.
        if exc.errno not in ignorable_errnos:
            raise
    return ()

2186 

def distributions_from_metadata(path):
    """Yield one develop-precedence Distribution for the metadata at
    ``path`` (an *.egg-info / *.dist-info directory or a metadata file)."""
    root = os.path.dirname(path)
    if os.path.isdir(path):
        if len(os.listdir(path)) == 0:
            # empty metadata dir; skip
            return
        metadata = PathMetadata(root, path)
    else:
        # Standalone metadata file (e.g. a bare PKG-INFO / *.egg-info file).
        metadata = FileMetadata(path)
    entry = os.path.basename(path)
    yield Distribution.from_location(
        root,
        entry,
        metadata,
        precedence=DEVELOP_DIST,
    )

2204 

2205 

def non_empty_lines(path):
    """
    Yield non-empty lines from file at path
    """
    with open(path) as handle:
        for raw in handle:
            stripped = raw.strip()
            if stripped:
                yield stripped

2215 

2216 

def resolve_egg_link(path):
    """
    Given a path to an .egg-link, resolve distributions
    present in the referenced path.
    """
    referenced_paths = non_empty_lines(path)
    # Each non-empty line is a path relative to the .egg-link's directory.
    resolved_paths = (
        os.path.join(os.path.dirname(path), ref) for ref in referenced_paths
    )
    # Only the first referenced path's distributions are returned.
    dist_groups = map(find_distributions, resolved_paths)
    return next(dist_groups, ())

2228 

2229 

# Legacy (pre-importlib) importer, only present on old interpreters.
if hasattr(pkgutil, 'ImpImporter'):
    register_finder(pkgutil.ImpImporter, find_on_path)

# Standard filesystem importer.
register_finder(importlib_machinery.FileFinder, find_on_path)

# Namespace-package registries, reset together with the module state.
_declare_state('dict', _namespace_handlers={})
_declare_state('dict', _namespace_packages={})

2237 

2238 

def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `namespace_handler` is a callable like this::

        def namespace_handler(importer, path_entry, moduleName, module):
            # return a path_entry to use for child packages

    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath. For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    """
    # Later registrations for the same importer type win.
    _namespace_handlers[importer_type] = namespace_handler

2255 

2256 

def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)"""

    importer = get_importer(path_item)
    if importer is None:
        return None

    # use find_spec (PEP 451) and fall-back to find_module (PEP 302)
    try:
        spec = importer.find_spec(packageName)
    except AttributeError:
        # capture warnings due to #1111
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            loader = importer.find_module(packageName)
    else:
        loader = spec.loader if spec else None

    if loader is None:
        return None
    module = sys.modules.get(packageName)
    if module is None:
        # Create a fresh namespace module with an empty __path__ and attach
        # it to its parent package.
        module = sys.modules[packageName] = types.ModuleType(packageName)
        module.__path__ = []
        _set_parent_ns(packageName)
    elif not hasattr(module, '__path__'):
        raise TypeError("Not a package:", packageName)
    # Importer-type-specific handler decides which subpath (if any) to add.
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer, path_item, packageName, module)
    if subpath is not None:
        # Record the new portion, import the package, then re-sort
        # __path__ to match sys.path ordering.
        path = module.__path__
        path.append(subpath)
        importlib.import_module(packageName)
        _rebuild_mod_path(path, packageName, module)
    return subpath

2292 

2293 

def _rebuild_mod_path(orig_path, package_name, module):
    """
    Rebuild module.__path__ ensuring that all entries are ordered
    corresponding to their sys.path order
    """
    sys_path = [_normalize_cached(p) for p in sys.path]

    def safe_sys_path_index(entry):
        """
        Workaround for #520 and #513.
        """
        # Entries not found on sys.path sort after everything else.
        try:
            return sys_path.index(entry)
        except ValueError:
            return float('inf')

    def position_in_sys_path(path):
        """
        Return the ordinal of the path based on its position in sys.path
        """
        # Drop the trailing package-name components to recover the
        # sys.path entry this __path__ portion came from.
        path_parts = path.split(os.sep)
        module_parts = package_name.count('.') + 1
        parts = path_parts[:-module_parts]
        return safe_sys_path_index(_normalize_cached(os.sep.join(parts)))

    new_path = sorted(orig_path, key=position_in_sys_path)
    new_path = [_normalize_cached(p) for p in new_path]

    # Mutate in place when possible so existing aliases of __path__
    # observe the new ordering.
    if isinstance(module.__path__, list):
        module.__path__[:] = new_path
    else:
        module.__path__ = new_path

2326 

2327 

def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package"""

    msg = (
        f"Deprecated call to `pkg_resources.declare_namespace({packageName!r})`.\n"
        "Implementing implicit namespace packages (as specified in PEP 420) "
        "is preferred to `pkg_resources.declare_namespace`. "
        "See https://setuptools.pypa.io/en/latest/references/"
        "keywords.html#keyword-namespace-packages"
    )
    warnings.warn(msg, DeprecationWarning, stacklevel=2)

    # The import lock guards the shared _namespace_packages registry.
    _imp.acquire_lock()
    try:
        if packageName in _namespace_packages:
            return

        path = sys.path
        parent, _, _ = packageName.rpartition('.')

        if parent:
            # Declare (and if needed import) the parent first; the child is
            # then searched on the parent's __path__ rather than sys.path.
            declare_namespace(parent)
            if parent not in _namespace_packages:
                __import__(parent)
            try:
                path = sys.modules[parent].__path__
            except AttributeError as e:
                raise TypeError("Not a package:", parent) from e

        # Track what packages are namespaces, so when new path items are added,
        # they can be updated
        _namespace_packages.setdefault(parent or None, []).append(packageName)
        _namespace_packages.setdefault(packageName, [])

        for path_item in path:
            # Ensure all the parent's path items are reflected in the child,
            # if they apply
            _handle_ns(packageName, path_item)

    finally:
        _imp.release_lock()

2369 

2370 

def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    _imp.acquire_lock()
    try:
        for package in _namespace_packages.get(parent, ()):
            subpath = _handle_ns(package, path_item)
            if subpath:
                # A newly-added portion may itself contain declared
                # child namespaces; recurse into it.
                fixup_namespace_packages(subpath, package)
    finally:
        _imp.release_lock()

2381 

2382 

def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""

    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    already_present = any(
        _normalize_cached(item) == normalized for item in module.__path__
    )
    # Only return the path if it's not already there.
    if not already_present:
        return subpath
    return None

2394 

# Legacy (pre-importlib) importer, only present on old interpreters.
if hasattr(pkgutil, 'ImpImporter'):
    register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)

register_namespace_handler(zipimport.zipimporter, file_ns_handler)
register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)

2401 

def null_ns_handler(importer, path_item, packageName, module):
    """Namespace handler for importers that contribute no subpaths."""
    return None

2405 

# Default: importer types with no specific handler contribute nothing.
register_namespace_handler(object, null_ns_handler)

2409 

def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes"""
    patched = _cygwin_patch(filename)
    resolved = os.path.realpath(os.path.normpath(patched))
    return os.path.normcase(resolved)

2413 

2414 

2415def _cygwin_patch(filename): # pragma: nocover 

2416 """ 

2417 Contrary to POSIX 2008, on Cygwin, getcwd (3) contains 

2418 symlink components. Using 

2419 os.path.abspath() works around this limitation. A fix in os.getcwd() 

2420 would probably better, in Cygwin even more so, except 

2421 that this seems to be by design... 

2422 """ 

2423 return os.path.abspath(filename) if sys.platform == 'cygwin' else filename 

2424 

def _normalize_cached(filename, _cache={}):
    # NOTE: the mutable default argument is intentional here -- it serves as
    # the process-wide memoization cache for normalize_path().
    try:
        return _cache[filename]
    except KeyError:
        _cache[filename] = result = normalize_path(filename)
        return result

2432 

def _is_egg_path(path):
    """
    Determine if given path appears to be an egg.
    """
    # Either a zipped .egg file or an unpacked .egg directory.
    return _is_zip_egg(path) or _is_unpacked_egg(path)

2439 

2440 

2441def _is_zip_egg(path): 

2442 return ( 

2443 path.lower().endswith('.egg') 

2444 and os.path.isfile(path) 

2445 and zipfile.is_zipfile(path) 

2446 ) 

2447 

2448 

2449def _is_unpacked_egg(path): 

2450 """ 

2451 Determine if given path appears to be an unpacked egg. 

2452 """ 

2453 return path.lower().endswith('.egg') and os.path.isfile( 

2454 os.path.join(path, 'EGG-INFO', 'PKG-INFO') 

2455 ) 

2456 

2457 

2458def _set_parent_ns(packageName): 

2459 parts = packageName.split('.') 

2460 name = parts.pop() 

2461 if parts: 

2462 parent = '.'.join(parts) 

2463 setattr(sys.modules[parent], name, sys.modules[packageName]) 

2464 

2465 

# Matches a dotted module/group name, e.g. "pkg.sub.mod".
MODULE = re.compile(r"\w+(\.\w+)*$").match
# Parses egg basenames of the form: name[-version[-pyX.Y[-platform]]].
EGG_NAME = re.compile(
    r"""
    (?P<name>[^-]+) (
    -(?P<ver>[^-]+) (
    -py(?P<pyver>[^-]+) (
    -(?P<plat>.+)
    )?
    )?
    )?
    """,
    re.VERBOSE | re.IGNORECASE,
).match

2479 

class EntryPoint:
    """Object representing an advertised importable object"""

    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.name = name
        self.module_name = module_name
        # Stored as tuples so EntryPoint instances are effectively immutable.
        self.attrs = tuple(attrs)
        self.extras = tuple(extras)
        self.dist = dist

    def __str__(self):
        # Round-trips through .parse(): "name = module:attr.path [extras]"
        s = "%s = %s" % (self.name, self.module_name)
        if self.attrs:
            s += ':' + '.'.join(self.attrs)
        if self.extras:
            s += ' [%s]' % ','.join(self.extras)
        return s

    def __repr__(self):
        return "EntryPoint.parse(%r)" % str(self)

    def load(self, require=True, *args, **kwargs):
        """
        Require packages for this EntryPoint, then resolve it.
        """
        if not require or args or kwargs:
            # Extra parameters (or require=False) are a deprecated calling
            # convention; warn but keep honoring them.
            warnings.warn(
                "Parameters to load are deprecated. Call .resolve and "
                ".require separately.",
                PkgResourcesDeprecationWarning,
                stacklevel=2,
            )
        if require:
            self.require(*args, **kwargs)
        return self.resolve()

    def resolve(self):
        """
        Resolve the entry point from its module and attrs.
        """
        module = __import__(self.module_name, fromlist=['__name__'], level=0)
        try:
            # Walk the dotted attribute chain, e.g. "SomeClass.method".
            return functools.reduce(getattr, self.attrs, module)
        except AttributeError as exc:
            raise ImportError(str(exc)) from exc

    def require(self, env=None, installer=None):
        # Extras can only be resolved against an owning distribution.
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)

        # Get the requirements for this entry point with all its extras and
        # then resolve them. We have to pass `extras` along when resolving so
        # that the working set knows what extras we want. Otherwise, for
        # dist-info distributions, the working set will assume that the
        # requirements for that extra are purely optional and skip over them.
        reqs = self.dist.requires(self.extras)
        items = working_set.resolve(reqs, env, installer, extras=self.extras)
        list(map(working_set.add, items))

    # Grammar for a single entry-point line: name = module[:attrs] [extras]
    pattern = re.compile(
        r'\s*'
        r'(?P<name>.+?)\s*'
        r'=\s*'
        r'(?P<module>[\w.]+)\s*'
        r'(:\s*(?P<attr>[\w.]+))?\s*'
        r'(?P<extras>\[.*\])?\s*$'
    )

    @classmethod
    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1, extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        m = cls.pattern.match(src)
        if not m:
            msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
            raise ValueError(msg, src)
        res = m.groupdict()
        extras = cls._parse_extras(res['extras'])
        attrs = res['attr'].split('.') if res['attr'] else ()
        return cls(res['name'], res['module'], attrs, extras, dist)

    @classmethod
    def _parse_extras(cls, extras_spec):
        if not extras_spec:
            return ()
        # Reuse Requirement parsing by prepending a dummy project name;
        # version specifiers are not valid in an extras clause.
        req = Requirement.parse('x' + extras_spec)
        if req.specs:
            raise ValueError()
        return req.extras

    @classmethod
    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        this = {}
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            if ep.name in this:
                raise ValueError("Duplicate entry point", group, ep.name)
            this[ep.name] = ep
        return this

    @classmethod
    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        # `data` may be a mapping of group -> lines, or raw INI-style text.
        if isinstance(data, dict):
            data = data.items()
        else:
            data = split_sections(data)
        maps = {}
        for group, lines in data:
            if group is None:
                if not lines:
                    continue
                raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            if group in maps:
                raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)
        return maps

2612 

def _version_from_file(lines):
    """
    Given an iterable of lines from a Metadata file, return
    the value of the Version field, if present, or None otherwise.
    """
    version_lines = (ln for ln in lines if ln.lower().startswith('version:'))
    line = next(version_lines, '')
    value = line.partition(':')[2]
    return safe_version(value.strip()) or None

2626 

2627 

2628class Distribution: 

2629 """Wrap an actual or potential sys.path entry w/metadata""" 

2630 

2631 PKG_INFO = 'PKG-INFO' 

2632 

2633 def __init__( 

2634 self, 

2635 location=None, 

2636 metadata=None, 

2637 project_name=None, 

2638 version=None, 

2639 py_version=PY_MAJOR, 

2640 platform=None, 

2641 precedence=EGG_DIST, 

2642 ): 

2643 self.project_name = safe_name(project_name or 'Unknown') 

2644 if version is not None: 

2645 self._version = safe_version(version) 

2646 self.py_version = py_version 

2647 self.platform = platform 

2648 self.location = location 

2649 self.precedence = precedence 

2650 self._provider = metadata or empty_provider 

2651 

2652 @classmethod 

2653 def from_location(cls, location, basename, metadata=None, **kw): 

2654 project_name, version, py_version, platform = [None] * 4 

2655 basename, ext = os.path.splitext(basename) 

2656 if ext.lower() in _distributionImpl: 

2657 cls = _distributionImpl[ext.lower()] 

2658 

2659 match = EGG_NAME(basename) 

2660 if match: 

2661 project_name, version, py_version, platform = match.group( 

2662 'name', 'ver', 'pyver', 'plat' 

2663 ) 

2664 return cls( 

2665 location, 

2666 metadata, 

2667 project_name=project_name, 

2668 version=version, 

2669 py_version=py_version, 

2670 platform=platform, 

2671 **kw, 

2672 )._reload_version() 

2673 

2674 def _reload_version(self): 

2675 return self 

2676 

2677 @property 

2678 def hashcmp(self): 

2679 return ( 

2680 self._forgiving_parsed_version, 

2681 self.precedence, 

2682 self.key, 

2683 self.location, 

2684 self.py_version or '', 

2685 self.platform or '', 

2686 ) 

2687 

2688 def __hash__(self): 

2689 return hash(self.hashcmp) 

2690 

2691 def __lt__(self, other): 

2692 return self.hashcmp < other.hashcmp 

2693 

2694 def __le__(self, other): 

2695 return self.hashcmp <= other.hashcmp 

2696 

2697 def __gt__(self, other): 

2698 return self.hashcmp > other.hashcmp 

2699 

2700 def __ge__(self, other): 

2701 return self.hashcmp >= other.hashcmp 

2702 

2703 def __eq__(self, other): 

2704 if not isinstance(other, self.__class__): 

2705 # It's not a Distribution, so they are not equal 

2706 return False 

2707 return self.hashcmp == other.hashcmp 

2708 

2709 def __ne__(self, other): 

2710 return not self == other 

2711 

2712 # These properties have to be lazy so that we don't have to load any 

2713 # metadata until/unless it's actually needed. (i.e., some distributions 

2714 # may not know their name or version without loading PKG-INFO) 

2715 

2716 @property 

2717 def key(self): 

2718 try: 

2719 return self._key 

2720 except AttributeError: 

2721 self._key = key = self.project_name.lower() 

2722 return key 

2723 

2724 @property 

2725 def parsed_version(self): 

2726 if not hasattr(self, "_parsed_version"): 

2727 try: 

2728 self._parsed_version = parse_version(self.version) 

2729 except packaging.version.InvalidVersion as ex: 

2730 info = f"(package: {self.project_name})" 

2731 if hasattr(ex, "add_note"): 

2732 ex.add_note(info) # PEP 678 

2733 raise 

2734 raise packaging.version.InvalidVersion(f"{str(ex)} {info}") from None 

2735 

2736 return self._parsed_version 

2737 

2738 @property 

2739 def _forgiving_parsed_version(self): 

2740 try: 

2741 return self.parsed_version 

2742 except packaging.version.InvalidVersion as ex: 

2743 self._parsed_version = parse_version(_forgiving_version(self.version)) 

2744 

2745 notes = "\n".join(getattr(ex, "__notes__", [])) # PEP 678 

2746 msg = f"""!!\n\n 

2747 ************************************************************************* 

2748 {str(ex)}\n{notes} 

2749 

2750 This is a long overdue deprecation. 

2751 For the time being, `pkg_resources` will use `{self._parsed_version}` 

2752 as a replacement to avoid breaking existing environments, 

2753 but no future compatibility is guaranteed. 

2754 

2755 If you maintain package {self.project_name} you should implement 

2756 the relevant changes to adequate the project to PEP 440 immediately. 

2757 ************************************************************************* 

2758 \n\n!! 

2759 """ 

2760 warnings.warn(msg, DeprecationWarning) 

2761 

2762 return self._parsed_version 

2763 

2764 @property 

2765 def version(self): 

2766 try: 

2767 return self._version 

2768 except AttributeError as e: 

2769 version = self._get_version() 

2770 if version is None: 

2771 path = self._get_metadata_path_for_display(self.PKG_INFO) 

2772 msg = ("Missing 'Version:' header and/or {} file at path: {}").format( 

2773 self.PKG_INFO, path 

2774 ) 

2775 raise ValueError(msg, self) from e 

2776 

2777 return version 

2778 

2779 @property 

2780 def _dep_map(self): 

2781 """ 

2782 A map of extra to its list of (direct) requirements 

2783 for this distribution, including the null extra. 

2784 """ 

2785 try: 

2786 return self.__dep_map 

2787 except AttributeError: 

2788 self.__dep_map = self._filter_extras(self._build_dep_map()) 

2789 return self.__dep_map 

2790 

2791 @staticmethod 

2792 def _filter_extras(dm): 

2793 """ 

2794 Given a mapping of extras to dependencies, strip off 

2795 environment markers and filter out any dependencies 

2796 not matching the markers. 

2797 """ 

2798 for extra in list(filter(None, dm)): 

2799 new_extra = extra 

2800 reqs = dm.pop(extra) 

2801 new_extra, _, marker = extra.partition(':') 

2802 fails_marker = marker and ( 

2803 invalid_marker(marker) or not evaluate_marker(marker) 

2804 ) 

2805 if fails_marker: 

2806 reqs = [] 

2807 new_extra = safe_extra(new_extra) or None 

2808 

2809 dm.setdefault(new_extra, []).extend(reqs) 

2810 return dm 

2811 

2812 def _build_dep_map(self): 

2813 dm = {} 

2814 for name in 'requires.txt', 'depends.txt': 

2815 for extra, reqs in split_sections(self._get_metadata(name)): 

2816 dm.setdefault(extra, []).extend(parse_requirements(reqs)) 

2817 return dm 

2818 

2819 def requires(self, extras=()): 

2820 """List of Requirements needed for this distro if `extras` are used""" 

2821 dm = self._dep_map 

2822 deps = [] 

2823 deps.extend(dm.get(None, ())) 

2824 for ext in extras: 

2825 try: 

2826 deps.extend(dm[safe_extra(ext)]) 

2827 except KeyError as e: 

2828 raise UnknownExtra( 

2829 "%s has no such extra feature %r" % (self, ext) 

2830 ) from e 

2831 return deps 

2832 

2833 def _get_metadata_path_for_display(self, name): 

2834 """ 

2835 Return the path to the given metadata file, if available. 

2836 """ 

2837 try: 

2838 # We need to access _get_metadata_path() on the provider object 

2839 # directly rather than through this class's __getattr__() 

2840 # since _get_metadata_path() is marked private. 

2841 path = self._provider._get_metadata_path(name) 

2842 

2843 # Handle exceptions e.g. in case the distribution's metadata 

2844 # provider doesn't support _get_metadata_path(). 

2845 except Exception: 

2846 return '[could not detect]' 

2847 

2848 return path 

2849 

2850 def _get_metadata(self, name): 

2851 if self.has_metadata(name): 

2852 for line in self.get_metadata_lines(name): 

2853 yield line 

2854 

2855 def _get_version(self): 

2856 lines = self._get_metadata(self.PKG_INFO) 

2857 version = _version_from_file(lines) 

2858 

2859 return version 

2860 

2861 def activate(self, path=None, replace=False): 

2862 """Ensure distribution is importable on `path` (default=sys.path)""" 

2863 if path is None: 

2864 path = sys.path 

2865 self.insert_on(path, replace=replace) 

2866 if path is sys.path: 

2867 fixup_namespace_packages(self.location) 

2868 for pkg in self._get_metadata('namespace_packages.txt'): 

2869 if pkg in sys.modules: 

2870 declare_namespace(pkg) 

2871 

2872 def egg_name(self): 

2873 """Return what this distribution's standard .egg filename should be""" 

2874 filename = "%s-%s-py%s" % ( 

2875 to_filename(self.project_name), 

2876 to_filename(self.version), 

2877 self.py_version or PY_MAJOR, 

2878 ) 

2879 

2880 if self.platform: 

2881 filename += '-' + self.platform 

2882 return filename 

2883 

2884 def __repr__(self): 

2885 if self.location: 

2886 return "%s (%s)" % (self, self.location) 

2887 else: 

2888 return str(self) 

2889 

2890 def __str__(self): 

2891 try: 

2892 version = getattr(self, 'version', None) 

2893 except ValueError: 

2894 version = None 

2895 version = version or "[unknown version]" 

2896 return "%s %s" % (self.project_name, version) 

2897 

2898 def __getattr__(self, attr): 

2899 """Delegate all unrecognized public attributes to .metadata provider""" 

2900 if attr.startswith('_'): 

2901 raise AttributeError(attr) 

2902 return getattr(self._provider, attr) 

2903 

2904 def __dir__(self): 

2905 return list( 

2906 set(super(Distribution, self).__dir__()) 

2907 | set(attr for attr in self._provider.__dir__() if not attr.startswith('_')) 

2908 ) 

2909 

@classmethod
def from_filename(cls, filename, metadata=None, **kw):
    """Build a distribution for `filename`, deriving metadata from its basename."""
    location = _normalize_cached(filename)
    basename = os.path.basename(filename)
    return cls.from_location(location, basename, metadata, **kw)

2915 

def as_requirement(self):
    """Return a ``Requirement`` that matches this distribution exactly"""
    if isinstance(self.parsed_version, packaging.version.Version):
        op = "=="
    else:
        # non-PEP 440 versions need the arbitrary-equality operator
        op = "==="
    return Requirement.parse("%s%s%s" % (self.project_name, op, self.parsed_version))

2924 

def load_entry_point(self, group, name):
    """Return the `name` entry point of `group` or raise ImportError"""
    ep = self.get_entry_info(group, name)
    if ep is not None:
        return ep.load()
    raise ImportError("Entry point %r not found" % ((group, name),))

2931 

def get_entry_map(self, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    try:
        ep_map = self._ep_map
    except AttributeError:
        # First access: parse entry_points.txt once and cache the result.
        ep_map = EntryPoint.parse_map(self._get_metadata('entry_points.txt'), self)
        self._ep_map = ep_map
    if group is None:
        return ep_map
    return ep_map.get(group, {})

2943 

def get_entry_info(self, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    entry_map = self.get_entry_map(group)
    return entry_map.get(name)

2947 

# FIXME: 'Distribution.insert_on' is too complex (13)
def insert_on(self, path, loc=None, replace=False):  # noqa: C901
    """Ensure self.location is on path

    If replace=False (default):
        - If location is already in path anywhere, do nothing.
        - Else:
          - If it's an egg and its parent directory is on path,
            insert just ahead of the parent.
          - Else: add to the end of path.
    If replace=True:
        - If location is already on path anywhere (not eggs)
          or higher priority than its parent (eggs)
          do nothing.
        - Else:
          - If it's an egg and its parent directory is on path,
            insert just ahead of the parent,
            removing any lower-priority entries.
          - Else: add it to the front of path.
    """

    loc = loc or self.location
    if not loc:
        return

    # Compare normalized paths, but insert the caller-visible `loc`
    # (un-normalized) into `path`; npath mirrors path entry-for-entry.
    nloc = _normalize_cached(loc)
    bdir = os.path.dirname(nloc)
    npath = [(p and _normalize_cached(p) or p) for p in path]

    for p, item in enumerate(npath):
        if item == nloc:
            if replace:
                break
            else:
                # don't modify path (even removing duplicates) if
                # found and not replace
                return
        elif item == bdir and self.precedence == EGG_DIST:
            # if it's an .egg, give it precedence over its directory
            # UNLESS it's already been added to sys.path and replace=False
            if (not replace) and nloc in npath[p:]:
                return
            if path is sys.path:
                self.check_version_conflict()
            path.insert(p, loc)
            npath.insert(p, nloc)
            break
    else:
        # Not found anywhere on path: append (or prepend when
        # replace=True); no duplicates can exist, so we're done.
        if path is sys.path:
            self.check_version_conflict()
        if replace:
            path.insert(0, loc)
        else:
            path.append(loc)
        return

    # p is the spot where we found or inserted loc; now remove duplicates
    while True:
        try:
            np = npath.index(nloc, p + 1)
        except ValueError:
            break
        else:
            del npath[np], path[np]
            # ha!
            p = np

    return

3016 

def check_version_conflict(self):
    """Warn if a module from this distribution's top-level packages was
    already imported from somewhere other than this distribution."""
    if self.key == 'setuptools':
        # ignore the inevitable setuptools self-conflicts :(
        return

    # Namespace packages are shared between distributions, so they are
    # exempt from the conflict check below.
    nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
    loc = normalize_path(self.location)
    for modname in self._get_metadata('top_level.txt'):
        if (
            modname not in sys.modules
            or modname in nsp
            or modname in _namespace_packages
        ):
            continue
        if modname in ('pkg_resources', 'setuptools', 'site'):
            continue
        fn = getattr(sys.modules[modname], '__file__', None)
        if fn and (
            normalize_path(fn).startswith(loc) or fn.startswith(self.location)
        ):
            # already imported from within this distribution -- no conflict
            continue
        issue_warning(
            "Module %s was already imported from %s, but %s is being added"
            " to sys.path" % (modname, fn, self.location),
        )

3042 

def has_version(self):
    """Return True when this distribution's version can be determined."""
    try:
        self.version
    except ValueError:
        # version metadata is missing or unparseable
        issue_warning("Unbuilt egg for " + repr(self))
        return False
    except SystemError:
        # TODO: remove this except clause when python/cpython#103632 is fixed.
        return False
    return True

3053 

def clone(self, **kw):
    """Copy this distribution, substituting in any changed keyword args"""
    defaults = (
        'project_name', 'version', 'py_version',
        'platform', 'location', 'precedence',
    )
    for attr in defaults:
        kw.setdefault(attr, getattr(self, attr, None))
    kw.setdefault('metadata', self._provider)
    return self.__class__(**kw)

3061 

@property
def extras(self):
    """The truthy extra names present in this distribution's dep map."""
    return list(filter(None, self._dep_map))

3065 

3066 

class EggInfoDistribution(Distribution):
    def _reload_version(self):
        """
        Prefer the version recorded in the metadata file over the one
        derived from the filename.

        Packages installed by distutils (e.g. numpy or scipy) use an old
        safe_version, so their version numbers can get mangled when
        converted to filenames (e.g., 1.11.0.dev0+2329eae to
        1.11.0.dev0_2329eae) and will not be parsed properly downstream
        by Distribution and safe_version.  Reading the metadata file
        itself avoids that mangling.
        """
        metadata_version = self._get_version()
        if metadata_version:
            self._version = metadata_version
        return self

3084 

3085 

class DistInfoDistribution(Distribution):
    """
    Wrap an actual or potential sys.path entry
    w/metadata, .dist-info style.
    """

    # dist-info stores its PKG-INFO-style metadata under this name
    PKG_INFO = 'METADATA'
    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")

    @property
    def _parsed_pkg_info(self):
        """Parse and cache metadata"""
        try:
            return self._pkg_info
        except AttributeError:
            pass
        parsed = email.parser.Parser().parsestr(self.get_metadata(self.PKG_INFO))
        self._pkg_info = parsed
        return parsed

    @property
    def _dep_map(self):
        # Cached mapping of extra name (or None) -> list of requirements.
        try:
            return self.__dep_map
        except AttributeError:
            self.__dep_map = self._compute_dependencies()
            return self.__dep_map

    def _compute_dependencies(self):
        """Recompute this distribution's dependencies."""
        self.__dep_map = dep_map = {None: []}

        # Collect every Requires-Dist entry, markers included.
        reqs = []
        for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
            reqs.extend(parse_requirements(req))

        def reqs_for_extra(extra):
            # Requirements whose marker is absent or satisfied for `extra`.
            for req in reqs:
                if not req.marker or req.marker.evaluate({'extra': extra}):
                    yield req

        common = types.MappingProxyType(dict.fromkeys(reqs_for_extra(None)))
        dep_map[None].extend(common)

        # Each extra gets only the requirements not already unconditional.
        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
            s_extra = safe_extra(extra.strip())
            dep_map[s_extra] = [r for r in reqs_for_extra(extra) if r not in common]

        return dep_map

3135 

3136 

# Distribution subclass to instantiate for each supported metadata
# layout, keyed by path extension.
_distributionImpl = {
    '.egg': Distribution,
    '.egg-info': EggInfoDistribution,
    '.dist-info': DistInfoDistribution,
}

3142 

3143 

def issue_warning(*args, **kw):
    """Emit a warning, attributed to the first caller outside this module."""
    depth = 1
    here = globals()
    try:
        # walk outward until we leave pkg_resources' own frames
        frame = sys._getframe(depth)
        while frame.f_globals is here:
            depth += 1
            frame = sys._getframe(depth)
    except ValueError:
        # ran off the top of the stack; fall back to what we have
        pass
    warnings.warn(stacklevel=depth + 1, *args, **kw)

3155 

3156 

def parse_requirements(strs):
    """
    Yield ``Requirement`` objects for each specification in `strs`.

    `strs` must be a string, or a (possibly-nested) iterable thereof.
    """
    lines = yield_lines(strs)
    uncommented = map(drop_comment, lines)
    return map(Requirement, join_continuation(uncommented))

3164 

3165 

class RequirementParseError(packaging.requirements.InvalidRequirement):
    """Compatibility wrapper for InvalidRequirement"""

3168 

3169 

class Requirement(packaging.requirements.Requirement):
    def __init__(self, requirement_string):
        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
        super(Requirement, self).__init__(requirement_string)
        self.unsafe_name = self.name
        project_name = safe_name(self.name)
        self.project_name = project_name
        self.key = project_name.lower()
        self.specs = [(clause.operator, clause.version) for clause in self.specifier]
        self.extras = tuple(safe_extra(extra) for extra in self.extras)
        # Everything identity-relevant, precomputed once so __eq__ and
        # __hash__ are cheap.
        self.hashCmp = (
            self.key,
            self.url,
            self.specifier,
            frozenset(self.extras),
            str(self.marker) if self.marker else None,
        )
        self.__hash = hash(self.hashCmp)

    def __eq__(self, other):
        if not isinstance(other, Requirement):
            return False
        return self.hashCmp == other.hashCmp

    def __ne__(self, other):
        return not self == other

    def __contains__(self, item):
        if isinstance(item, Distribution):
            if item.key != self.key:
                return False
            item = item.version

        # Allow prereleases always in order to match the previous behavior of
        # this method. In the future this should be smarter and follow PEP 440
        # more accurately.
        return self.specifier.contains(item, prereleases=True)

    def __hash__(self):
        return self.__hash

    def __repr__(self):
        return "Requirement.parse(%r)" % str(self)

    @staticmethod
    def parse(s):
        (req,) = parse_requirements(s)
        return req

3216 

3217 

3218def _always_object(classes): 

3219 """ 

3220 Ensure object appears in the mro even 

3221 for old-style classes. 

3222 """ 

3223 if object not in classes: 

3224 return classes + (object,) 

3225 return classes 

3226 

3227 

def _find_adapter(registry, ob):
    """Return an adapter factory for `ob` from `registry`"""
    mro = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob))))
    # first matching class in MRO order wins; None when nothing matches
    return next((registry[t] for t in mro if t in registry), None)

3234 

3235 

def ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    parent = os.path.dirname(path)
    os.makedirs(parent, exist_ok=True)

3240 

3241 

def _bypass_ensure_directory(path):
    """Sandbox-bypassing version of ensure_directory()"""
    if not WRITE_SUPPORT:
        raise IOError('"os.mkdir" not supported on this platform.')
    dirname, filename = split(path)
    # nothing to do for roots, bare names, or already-existing parents
    if not (dirname and filename) or isdir(dirname):
        return
    # create ancestors first, then this directory
    _bypass_ensure_directory(dirname)
    try:
        mkdir(dirname, 0o755)
    except FileExistsError:
        # lost a race with another creator; that's fine
        pass

3253 

3254 

def split_sections(s):
    """Split a string or iterable thereof into (section, content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines.  If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    current_section = None
    current_lines = []
    for line in yield_lines(s):
        if not line.startswith("["):
            current_lines.append(line)
            continue
        if not line.endswith("]"):
            raise ValueError("Invalid section heading", line)
        # flush the previous segment before starting a new one
        if current_section or current_lines:
            yield current_section, current_lines
        current_section = line[1:-1].strip()
        current_lines = []

    # wrap up last segment
    yield current_section, current_lines

3279 

3280 

def _mkstemp(*args, **kw):
    """Create a temp file via tempfile.mkstemp with sandboxing bypassed."""
    saved_open = os.open
    try:
        # temporarily bypass sandboxing
        os.open = os_open
        return tempfile.mkstemp(*args, **kw)
    finally:
        # and then put it back
        os.open = saved_open

3290 

3291 

# Silence the PEP440Warning by default, so that end users don't get hit by it
# randomly just because they use pkg_resources. We want to append the rule
# because we want earlier uses of filterwarnings to take precedence over this
# one.
warnings.filterwarnings("ignore", category=PEP440Warning, append=True)

3297 

3298 

3299# from jaraco.functools 1.3 

3300def _call_aside(f, *args, **kwargs): 

3301 f(*args, **kwargs) 

3302 return f 

3303 

3304 

@_call_aside
def _initialize(g=globals()):
    "Set up global resource manager (deliberately not state-saved)"
    manager = ResourceManager()
    g['_manager'] = manager
    # export the manager's public methods as module-level functions
    public_names = (name for name in dir(manager) if not name.startswith('_'))
    g.update((name, getattr(manager, name)) for name in public_names)

3315 

3316 

class PkgResourcesDeprecationWarning(Warning):
    """
    Base class for warning about deprecations in ``pkg_resources``.

    This class is not derived from ``DeprecationWarning``, and as such is
    visible by default.
    """

3324 

3325 

@_call_aside
def _initialize_master_working_set():
    """
    Prepare the master working set and make the ``require()``
    API available.

    This function has explicit effects on the global state
    of pkg_resources. It is intended to be invoked once at
    the initialization of this module.

    Invocation by other packages is unsupported and done
    at their own risk.
    """
    working_set = WorkingSet._build_master()
    _declare_state('object', working_set=working_set)

    # Bound methods of the master working set become module-level API
    # (published to globals() at the bottom of this function).
    require = working_set.require
    iter_entry_points = working_set.iter_entry_points
    add_activation_listener = working_set.subscribe
    run_script = working_set.run_script
    # backward compatibility
    run_main = run_script
    # Activate all distributions already on sys.path with replace=False and
    # ensure that all distributions added to the working set in the future
    # (e.g. by calling ``require()``) will get activated as well,
    # with higher priority (replace=True).
    tuple(dist.activate(replace=False) for dist in working_set)
    add_activation_listener(
        lambda dist: dist.activate(replace=True),
        existing=False,
    )
    working_set.entries = []
    # match order
    list(map(working_set.add_entry, sys.path))
    # Publish every local defined above (require, run_script, ...) as a
    # module global in one shot.
    globals().update(locals())