Coverage for /pythoncovmergedfiles/medio/medio/usr/lib/python3/dist-packages/pkg_resources/__init__.py: 1%

Shortcuts on this page

r m x   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1580 statements  

1# coding: utf-8 

2""" 

3Package resource API 

4-------------------- 

5 

6A resource is a logical file contained within a package, or a logical 

7subdirectory thereof. The package resource API expects resource names 

8to have their path parts separated with ``/``, *not* whatever the local 

9path separator is. Do not use os.path operations to manipulate resource 

10names being passed into the API. 

11 

12The package resource API is designed to work with normal filesystem packages, 

13.egg files, and unpacked .egg files. It can also work in a limited way with 

14.zip files and with custom PEP 302 loaders that support the ``get_data()`` 

15method. 

16""" 

17 

18from __future__ import absolute_import 

19 

20import sys 

21import os 

22import io 

23import time 

24import re 

25import types 

26import zipfile 

27import zipimport 

28import warnings 

29import stat 

30import functools 

31import pkgutil 

32import operator 

33import platform 

34import collections 

35import plistlib 

36import email.parser 

37import errno 

38import tempfile 

39import textwrap 

40import itertools 

41import inspect 

42import ntpath 

43import posixpath 

44from pkgutil import get_importer 

45 

46try: 

47 import _imp 

48except ImportError: 

49 # Python 3.2 compatibility 

50 import imp as _imp 

51 

52try: 

53 FileExistsError 

54except NameError: 

55 FileExistsError = OSError 

56 

57from pkg_resources.extern import six 

58from pkg_resources.extern.six.moves import urllib, map, filter 

59 

60# capture these to bypass sandboxing 

61from os import utime 

62try: 

63 from os import mkdir, rename, unlink 

64 WRITE_SUPPORT = True 

65except ImportError: 

66 # no write support, probably under GAE 

67 WRITE_SUPPORT = False 

68 

69from os import open as os_open 

70from os.path import isdir, split 

71 

72try: 

73 import importlib.machinery as importlib_machinery 

74 # access attribute to force import under delayed import mechanisms. 

75 importlib_machinery.__name__ 

76except ImportError: 

77 importlib_machinery = None 

78 

79from . import py31compat 

80from pkg_resources.extern import appdirs 

81from pkg_resources.extern import packaging 

82__import__('pkg_resources.extern.packaging.version') 

83__import__('pkg_resources.extern.packaging.specifiers') 

84__import__('pkg_resources.extern.packaging.requirements') 

85__import__('pkg_resources.extern.packaging.markers') 

86__import__('pkg_resources.py2_warn') 

87 

88 

# Make classes defined in this module new-style under Python 2
# (harmless no-op on Python 3).
__metaclass__ = type


# Python 2 (< (3, 0)) is still supported; of the Python 3 line, only
# 3.5+ is supported.
if (3, 0) < sys.version_info < (3, 5):
    raise RuntimeError("Python 3.5 or later is required")

if six.PY2:
    # Those builtin exceptions are only defined in Python 3
    PermissionError = None
    NotADirectoryError = None

# declare some globals that will be defined later to
# satisfy the linters.
require = None
working_set = None
add_activation_listener = None
resources_stream = None
cleanup_resources = None
resource_dir = None
resource_stream = None
set_extraction_path = None
resource_isdir = None
resource_string = None
iter_entry_points = None
resource_listdir = None
resource_filename = None
resource_exists = None
_distribution_finders = None
_namespace_handlers = None
_namespace_packages = None

119 

120 

class PEP440Warning(RuntimeWarning):
    """
    Warning issued when a version or specifier does not comply with PEP 440.
    """

126 

127 

def parse_version(v):
    """Parse version string `v`, falling back to legacy parsing.

    Returns a PEP 440 ``Version`` when `v` is a valid version, otherwise a
    ``LegacyVersion`` so that arbitrary strings still sort deterministically.
    """
    try:
        parsed = packaging.version.Version(v)
    except packaging.version.InvalidVersion:
        parsed = packaging.version.LegacyVersion(v)
    return parsed

133 

134 

135_state_vars = {} 

136 

137 

138def _declare_state(vartype, **kw): 

139 globals().update(kw) 

140 _state_vars.update(dict.fromkeys(kw, vartype)) 

141 

142 

def __getstate__():
    """Snapshot every declared state variable via its ``_sget_*`` handler."""
    g = globals()
    return {
        name: g['_sget_' + kind](g[name])
        for name, kind in _state_vars.items()
    }

149 

150 

def __setstate__(state):
    """Restore declared state variables via their ``_sset_*`` handlers."""
    g = globals()
    for name, value in state.items():
        restore = g['_sset_' + _state_vars[name]]
        restore(name, g[name], value)
    return state

156 

157 

158def _sget_dict(val): 

159 return val.copy() 

160 

161 

162def _sset_dict(key, ob, state): 

163 ob.clear() 

164 ob.update(state) 

165 

166 

167def _sget_object(val): 

168 return val.__getstate__() 

169 

170 

171def _sset_object(key, ob, state): 

172 ob.__setstate__(state) 

173 

174 

175_sget_none = _sset_none = lambda *args: None 

176 

177 

def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of Mac OS X that would be required to *use* extensions produced by
    distutils. But what we want when checking compatibility is to know the
    version of Mac OS X that we are *running*. To allow usage of packages that
    explicitly require a newer version of Mac OS X, we must also know the
    current version of the OS.

    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    """
    plat = get_build_platform()
    if sys.platform != "darwin":
        return plat
    m = macosVersionString.match(plat)
    if m is None:
        return plat
    try:
        running = '.'.join(_macosx_vers()[:2])
        plat = 'macosx-%s-%s' % (running, m.group(3))
    except ValueError:
        # not Mac OS X
        pass
    return plat

200 

201 

# The documented public surface of this module (also controls
# ``from pkg_resources import *``).
__all__ = [
    # Basic resource access and distribution/entry point discovery
    'require', 'run_script', 'get_provider', 'get_distribution',
    'load_entry_point', 'get_entry_map', 'get_entry_info',
    'iter_entry_points',
    'resource_string', 'resource_stream', 'resource_filename',
    'resource_listdir', 'resource_exists', 'resource_isdir',

    # Environmental control
    'declare_namespace', 'working_set', 'add_activation_listener',
    'find_distributions', 'set_extraction_path', 'cleanup_resources',
    'get_default_cache',

    # Primary implementation classes
    'Environment', 'WorkingSet', 'ResourceManager',
    'Distribution', 'Requirement', 'EntryPoint',

    # Exceptions
    'ResolutionError', 'VersionConflict', 'DistributionNotFound',
    'UnknownExtra', 'ExtractionError',

    # Warnings
    'PEP440Warning',

    # Parsing functions and string utilities
    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
    'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
    'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',

    # filesystem utilities
    'ensure_directory', 'normalize_path',

    # Distribution "precedence" constants
    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',

    # "Provider" interfaces, implementations, and registration/lookup APIs
    'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
    'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
    'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
    'register_finder', 'register_namespace_handler', 'register_loader_type',
    'fixup_namespace_packages', 'get_importer',

    # Warnings
    'PkgResourcesDeprecationWarning',

    # Deprecated/backward compatibility only
    'run_main', 'AvailableDistributions',
]

250 

251 

class ResolutionError(Exception):
    """Abstract base for dependency resolution errors"""

    def __repr__(self):
        # Render as e.g. ``ResolutionError('why',)`` for easy debugging.
        return '%s%r' % (self.__class__.__name__, self.args)

257 

258 

class VersionConflict(ResolutionError):
    """
    An already-installed version conflicts with the requested version.

    Should be initialized with the installed Distribution and the requested
    Requirement.
    """

    _template = "{self.dist} is installed but {self.req} is required"

    @property
    def dist(self):
        # The installed Distribution (first positional arg).
        return self.args[0]

    @property
    def req(self):
        # The requested Requirement (second positional arg).
        return self.args[1]

    def report(self):
        """Render a human-readable description of the conflict."""
        return self._template.format(self=self)

    def with_context(self, required_by):
        """
        If required_by is non-empty, return a version of self that is a
        ContextualVersionConflict.
        """
        if required_by:
            return ContextualVersionConflict(*(self.args + (required_by,)))
        return self

289 

290 

class ContextualVersionConflict(VersionConflict):
    """
    A VersionConflict that accepts a third parameter, the set of the
    requirements that required the installed Distribution.
    """

    # Extend the parent message with the requirers of the conflicting dist.
    _template = VersionConflict._template + ' by {self.required_by}'

    @property
    def required_by(self):
        # The set of requirement names that pulled in the installed dist
        # (third positional arg).
        return self.args[2]

302 

303 

class DistributionNotFound(ResolutionError):
    """A requested distribution was not found"""

    _template = ("The '{self.req}' distribution was not found "
                 "and is required by {self.requirers_str}")

    @property
    def req(self):
        # The unsatisfied Requirement (first positional arg).
        return self.args[0]

    @property
    def requirers(self):
        # Names of the requirers, or a false value when unknown.
        return self.args[1]

    @property
    def requirers_str(self):
        """Comma-joined requirer names, or 'the application' when unknown."""
        requirers = self.requirers
        if requirers:
            return ', '.join(requirers)
        return 'the application'

    def report(self):
        """Render a human-readable description of the failure."""
        return self._template.format(self=self)

    def __str__(self):
        return self.report()

329 

330 

class UnknownExtra(ResolutionError):
    """Distribution doesn't have an "extra feature" of the given name"""
    # No state beyond ResolutionError.args; exists so callers can catch
    # this case distinctly from other resolution failures.

333 

334 

# Registry of provider factories keyed by loader type; populated via
# register_loader_type() and consulted by get_provider().
_provider_factories = {}

# 'major.minor' of the running interpreter, e.g. '3.8'.
PY_MAJOR = '{}.{}'.format(*sys.version_info)

# Distribution "precedence" constants: higher values win when several
# distributions of the same project are available.
EGG_DIST = 3
BINARY_DIST = 2
SOURCE_DIST = 1
CHECKOUT_DIST = 0
DEVELOP_DIST = -1

343 

344 

def register_loader_type(loader_type, provider_factory):
    """Register `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    """
    # Later registrations for the same loader type silently replace
    # earlier ones.
    _provider_factories[loader_type] = provider_factory

353 

354 

def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq, Requirement):
        # Requirement: use the active distribution, activating one if needed.
        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    # Module name: import on demand, then adapt its loader to a provider.
    if moduleOrReq not in sys.modules:
        __import__(moduleOrReq)
    module = sys.modules[moduleOrReq]
    loader = getattr(module, '__loader__', None)
    factory = _find_adapter(_provider_factories, loader)
    return factory(module)

366 

367 

368def _macosx_vers(_cache=[]): 

369 if not _cache: 

370 version = platform.mac_ver()[0] 

371 # fallback for MacPorts 

372 if version == '': 

373 plist = '/System/Library/CoreServices/SystemVersion.plist' 

374 if os.path.exists(plist): 

375 if hasattr(plistlib, 'readPlist'): 

376 plist_content = plistlib.readPlist(plist) 

377 if 'ProductVersion' in plist_content: 

378 version = plist_content['ProductVersion'] 

379 

380 _cache.append(version.split('.')) 

381 return _cache[0] 

382 

383 

384def _macosx_arch(machine): 

385 return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine) 

386 

387 

def get_build_platform():
    """Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and Mac OS X.
    """
    from sysconfig import get_platform

    plat = get_platform()
    if sys.platform != "darwin" or plat.startswith('macosx-'):
        return plat
    try:
        version = _macosx_vers()
        machine = os.uname()[4].replace(" ", "_")
        return "macosx-%d.%d-%s" % (
            int(version[0]), int(version[1]),
            _macosx_arch(machine),
        )
    except ValueError:
        # if someone is running a non-Mac darwin system, this will fall
        # through to the default implementation
        return plat

410 

411 

# Parse 'macosx-<major>.<minor>-<arch>' and the pre-setuptools-0.6
# 'darwin-<major>.<minor>.<patch>-<arch>' platform strings.
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
# XXX backward compat
get_platform = get_build_platform

416 

417 

def compatible_platforms(provided, required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    # Trivially compatible: unspecified platform, or an exact match.
    if provided is None or required is None or provided == required:
        return True

    # Mac OS X special cases
    reqMac = macosVersionString.match(required)
    if not reqMac:
        # XXX Linux and other platforms' special cases should go here
        return False

    provMac = macosVersionString.match(provided)
    if not provMac:
        # this is backwards compatibility for packages built before
        # setuptools 0.6. All packages built after this point will
        # use the new macosx designation.
        provDarwin = darwinVersionString.match(provided)
        if provDarwin:
            dversion = int(provDarwin.group(1))
            macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
            if dversion == 7 and macosversion >= "10.3":
                return True
            if dversion == 8 and macosversion >= "10.4":
                return True
        # egg isn't macosx or legacy darwin
        return False

    # compatible when major version and machine type match, and the
    # required OS major update is >= the provided one
    return (
        provMac.group(1) == reqMac.group(1)
        and provMac.group(3) == reqMac.group(3)
        and int(provMac.group(2)) <= int(reqMac.group(2))
    )

462 

463 

def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    # Reset the caller's global namespace, keeping only __name__, so the
    # script executes as if it were the caller's module body.
    caller_globals = sys._getframe(1).f_globals
    saved_name = caller_globals['__name__']
    caller_globals.clear()
    caller_globals['__name__'] = saved_name
    dist = require(dist_spec)[0]
    dist.run_script(script_name, caller_globals)


# backward compatibility
run_main = run_script

475 

476 

def get_distribution(dist):
    """Return a current distribution object for a Requirement or string"""
    # Normalize step-by-step: string -> Requirement -> Distribution.
    if isinstance(dist, six.string_types):
        dist = Requirement.parse(dist)
    if isinstance(dist, Requirement):
        dist = get_provider(dist)
    if isinstance(dist, Distribution):
        return dist
    raise TypeError("Expected string, Requirement, or Distribution", dist)

486 

487 

def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    distribution = get_distribution(dist)
    return distribution.load_entry_point(group, name)

491 

492 

def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    distribution = get_distribution(dist)
    return distribution.get_entry_map(group)

496 

497 

def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    distribution = get_distribution(dist)
    return distribution.get_entry_info(group, name)

501 

502 

class IMetadataProvider:
    """Interface specification for distribution metadata access.

    NOTE: the methods below deliberately omit ``self`` -- this class
    documents the provider protocol and is not meant to be instantiated
    or called directly.
    """

    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""

    def get_metadata(name):
        """The named metadata resource as a string"""

    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""

    def metadata_isdir(name):
        """Is the named metadata a directory? (like ``os.path.isdir()``)"""

    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""

524 

525 

class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources

    Like the base interface, methods here intentionally omit ``self``;
    this class is a protocol specification, not an implementation.
    Resource names use ``/`` as the path separator regardless of platform.
    """

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Does the package contain the named resource?"""

    def resource_isdir(resource_name):
        """Is the named resource a directory? (like ``os.path.isdir()``)"""

    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""

552 

553 

class WorkingSet:
    """A collection of active distributions on sys.path (or a similar list)"""

    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        # Ordered path entries (mirrors sys.path for the default set).
        self.entries = []
        # entry -> list of project keys found on that entry.
        self.entry_keys = {}
        # project key -> the active Distribution for that project.
        self.by_key = {}
        # Callbacks invoked whenever a new distribution is activated.
        self.callbacks = []

        if entries is None:
            entries = sys.path

        for entry in entries:
            self.add_entry(entry)

    @classmethod
    def _build_master(cls):
        """
        Prepare the master working set.
        """
        ws = cls()
        try:
            from __main__ import __requires__
        except ImportError:
            # The main program does not list any requirements
            return ws

        # ensure the requirements are met
        try:
            ws.require(__requires__)
        except VersionConflict:
            # Retry from a clean slate rather than failing outright.
            return cls._build_from_requirements(__requires__)

        return ws

    @classmethod
    def _build_from_requirements(cls, req_spec):
        """
        Build a working set from a requirement spec. Rewrites sys.path.
        """
        # try it without defaults already on sys.path
        # by starting with an empty path
        ws = cls([])
        reqs = parse_requirements(req_spec)
        dists = ws.resolve(reqs, Environment())
        for dist in dists:
            ws.add(dist)

        # add any missing entries from sys.path
        for entry in sys.path:
            if entry not in ws.entries:
                ws.add_entry(entry)

        # then copy back to sys.path
        sys.path[:] = ws.entries
        return ws

    def add_entry(self, entry):
        """Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry, True)`` is used to find distributions
        corresponding to the path entry, and they are added. `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        equal ``sys.path``.)
        """
        self.entry_keys.setdefault(entry, [])
        self.entries.append(entry)
        for dist in find_distributions(entry, True):
            # insert=False: the entry is already in .entries.
            self.add(dist, entry, False)

    def __contains__(self, dist):
        """True if `dist` is the active distribution for its project"""
        return self.by_key.get(dist.key) == dist

    def find(self, req):
        """Find a distribution matching requirement `req`

        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`. But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        """
        dist = self.by_key.get(req.key)
        if dist is not None and dist not in req:
            # XXX add more info
            raise VersionConflict(dist, req)
        return dist

    def iter_entry_points(self, group, name=None):
        """Yield entry point objects from `group` matching `name`

        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        """
        return (
            entry
            for dist in self
            for entry in dist.get_entry_map(group).values()
            if name is None or name == entry.name
        )

    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        # Clear the caller's globals (keeping __name__) so the script runs
        # in a fresh namespace under the caller's module name.
        ns = sys._getframe(1).f_globals
        name = ns['__name__']
        ns.clear()
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)

    def __iter__(self):
        """Yield distributions for non-duplicate projects in the working set

        The yield order is the order in which the items' path entries were
        added to the working set.
        """
        seen = {}
        for item in self.entries:
            if item not in self.entry_keys:
                # workaround a cache issue
                continue

            for key in self.entry_keys[item]:
                if key not in seen:
                    seen[key] = 1
                    yield self.by_key[key]

    def add(self, dist, entry=None, insert=True, replace=False):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set, unless `replace=True`.
        If it's added, any callbacks registered with the ``subscribe()`` method
        will be called.
        """
        if insert:
            dist.insert_on(self.entries, entry, replace=replace)

        if entry is None:
            entry = dist.location
        # Index the project key both under the given entry and under the
        # dist's own location.
        keys = self.entry_keys.setdefault(entry, [])
        keys2 = self.entry_keys.setdefault(dist.location, [])
        if not replace and dist.key in self.by_key:
            # ignore hidden distros
            return

        self.by_key[dist.key] = dist
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        # Notify subscribers of the newly activated distribution.
        self._added_new(dist)

    def resolve(self, requirements, env=None, installer=None,
                replace_conflicting=False, extras=None):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects. `env`,
        if supplied, should be an ``Environment`` instance. If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set. `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.

        Unless `replace_conflicting=True`, raises a VersionConflict exception
        if any requirements are found on the path that have the correct name
        but the wrong version. Otherwise, if an `installer` is supplied it
        will be invoked to obtain the correct version of the requirement and
        activate it.

        `extras` is a list of the extras to be used with these requirements.
        This is important because extra requirements may look like `my_req;
        extra = "my_extra"`, which would otherwise be interpreted as a purely
        optional requirement. Instead, we want to be able to assert that these
        requirements are truly required.
        """

        # set up the stack
        requirements = list(requirements)[::-1]
        # set of processed requirements
        processed = {}
        # key -> dist
        best = {}
        to_activate = []

        req_extras = _ReqExtras()

        # Mapping of requirement to set of distributions that required it;
        # useful for reporting info about conflicts.
        required_by = collections.defaultdict(set)

        while requirements:
            # process dependencies breadth-first
            req = requirements.pop(0)
            if req in processed:
                # Ignore cyclic or redundant dependencies
                continue

            if not req_extras.markers_pass(req, extras):
                continue

            dist = best.get(req.key)
            if dist is None:
                # Find the best distribution and add it to the map
                dist = self.by_key.get(req.key)
                if dist is None or (dist not in req and replace_conflicting):
                    ws = self
                    if env is None:
                        if dist is None:
                            env = Environment(self.entries)
                        else:
                            # Use an empty environment and workingset to avoid
                            # any further conflicts with the conflicting
                            # distribution
                            env = Environment([])
                            ws = WorkingSet([])
                    dist = best[req.key] = env.best_match(
                        req, ws, installer,
                        replace_conflicting=replace_conflicting
                    )
                    if dist is None:
                        requirers = required_by.get(req, None)
                        raise DistributionNotFound(req, requirers)
                to_activate.append(dist)
            if dist not in req:
                # Oops, the "best" so far conflicts with a dependency
                dependent_req = required_by[req]
                raise VersionConflict(dist, req).with_context(dependent_req)

            # push the new requirements onto the stack
            new_requirements = dist.requires(req.extras)[::-1]
            requirements.extend(new_requirements)

            # Register the new requirements needed by req
            for new_requirement in new_requirements:
                required_by[new_requirement].add(req.project_name)
                req_extras[new_requirement] = req.extras

            processed[req] = True

        # return list of distros to activate
        return to_activate

    def find_plugins(
            self, plugin_env, full_env=None, installer=None, fallback=True):
        """Find all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            # add plugins+libs to sys.path
            map(working_set.add, distributions)
            # display errors
            print('Could not load', errors)

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        that contains all currently-available distributions. If `full_env` is
        not supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies. `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        """

        plugin_projects = list(plugin_env)
        # scan project names in alphabetic order
        plugin_projects.sort()

        error_info = {}
        distributions = {}

        if full_env is None:
            env = Environment(self.entries)
            env += plugin_env
        else:
            env = full_env + plugin_env

        # Resolve against a shadow copy so failures don't mutate self.
        shadow_set = self.__class__([])
        # put all our entries in shadow_set
        list(map(shadow_set.add, self))

        for project_name in plugin_projects:

            for dist in plugin_env[project_name]:

                req = [dist.as_requirement()]

                try:
                    resolvees = shadow_set.resolve(req, env, installer)

                except ResolutionError as v:
                    # save error info
                    error_info[dist] = v
                    if fallback:
                        # try the next older version of project
                        continue
                    else:
                        # give up on this project, keep going
                        break

                else:
                    list(map(shadow_set.add, resolvees))
                    distributions.update(dict.fromkeys(resolvees))

                    # success, no need to try any more versions of this project
                    break

        distributions = list(distributions)
        distributions.sort()

        return distributions, error_info

    def require(self, *requirements):
        """Ensure that distributions matching `requirements` are activated

        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required. The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        """
        needed = self.resolve(parse_requirements(requirements))

        for dist in needed:
            self.add(dist)

        return needed

    def subscribe(self, callback, existing=True):
        """Invoke `callback` for all distributions

        If `existing=True` (default),
        call on all existing ones, as well.
        """
        if callback in self.callbacks:
            return
        self.callbacks.append(callback)
        if not existing:
            return
        for dist in self:
            callback(dist)

    def _added_new(self, dist):
        # Fan a newly-activated distribution out to all subscribers.
        for callback in self.callbacks:
            callback(dist)

    def __getstate__(self):
        # Shallow copies so the pickled state is independent of later
        # mutation of this working set.
        return (
            self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
            self.callbacks[:]
        )

    def __setstate__(self, e_k_b_c):
        entries, keys, by_key, callbacks = e_k_b_c
        # Copy again on restore so the state tuple can be safely reused.
        self.entries = entries[:]
        self.entry_keys = keys.copy()
        self.by_key = by_key.copy()
        self.callbacks = callbacks[:]

938 

939 

940class _ReqExtras(dict): 

941 """ 

942 Map each requirement to the extras that demanded it. 

943 """ 

944 

945 def markers_pass(self, req, extras=None): 

946 """ 

947 Evaluate markers for req against each extra that 

948 demanded it. 

949 

950 Return False if the req has a marker and fails 

951 evaluation. Otherwise, return True. 

952 """ 

953 extra_evals = ( 

954 req.marker.evaluate({'extra': extra}) 

955 for extra in self.get(req, ()) + (extras or (None,)) 

956 ) 

957 return not req.marker or any(extra_evals) 

958 

959 

class Environment:
    """Searchable snapshot of distributions on a search path"""

    def __init__(
            self, search_path=None, platform=get_supported_platform(),
            python=PY_MAJOR):
        """Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items. If not
        supplied, ``sys.path`` is used.

        `platform` is an optional string specifying the name of the platform
        that platform-specific distributions must be compatible with. If
        unspecified, it defaults to the current platform. `python` is an
        optional string naming the desired version of Python (e.g. ``'3.6'``);
        it defaults to the current version.

        You may explicitly set `platform` (and/or `python`) to ``None`` if you
        wish to map *all* distributions, not just those compatible with the
        running platform or Python version.
        """
        # NOTE: the `platform` default is evaluated once, at function
        # definition time, so all instances share the platform detected
        # when this module was imported.
        # Maps lowercased project key -> list of Distributions, newest first.
        self._distmap = {}
        self.platform = platform
        self.python = python
        self.scan(search_path)

    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?

        The distribution must match the platform and python version
        requirements specified when this environment was created, or False
        is returned.
        """
        # None on either side acts as a wildcard for the Python version.
        py_compat = (
            self.python is None
            or dist.py_version is None
            or dist.py_version == self.python
        )
        return py_compat and compatible_platforms(dist.platform, self.platform)

    def remove(self, dist):
        """Remove `dist` from the environment"""
        # Raises KeyError/ValueError if `dist` was never added.
        self._distmap[dist.key].remove(dist)

    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items. If not
        supplied, ``sys.path`` is used. Only distributions conforming to
        the platform/python version defined at initialization are added.
        """
        if search_path is None:
            search_path = sys.path

        for item in search_path:
            for dist in find_distributions(item):
                self.add(dist)

    def __getitem__(self, project_name):
        """Return a newest-to-oldest list of distributions for `project_name`

        Uses case-insensitive `project_name` comparison, assuming all the
        project's distributions use their project's name converted to all
        lowercase as their key.

        """
        distribution_key = project_name.lower()
        return self._distmap.get(distribution_key, [])

    def add(self, dist):
        """Add `dist` if we ``can_add()`` it and it has not already been added
        """
        if self.can_add(dist) and dist.has_version():
            dists = self._distmap.setdefault(dist.key, [])
            if dist not in dists:
                dists.append(dist)
                # Keep each project's list ordered newest-to-oldest.
                dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)

    def best_match(
            self, req, working_set, installer=None, replace_conflicting=False):
        """Find distribution best matching `req` and usable on `working_set`

        This calls the ``find(req)`` method of the `working_set` to see if a
        suitable distribution is already active. (This may raise
        ``VersionConflict`` if an unsuitable version of the project is already
        active in the specified `working_set`.) If a suitable distribution
        isn't active, this method returns the newest distribution in the
        environment that meets the ``Requirement`` in `req`. If no suitable
        distribution is found, and `installer` is supplied, then the result of
        calling the environment's ``obtain(req, installer)`` method will be
        returned.
        """
        try:
            dist = working_set.find(req)
        except VersionConflict:
            if not replace_conflicting:
                raise
            dist = None
        if dist is not None:
            return dist
        # Lists are newest-first, so the first match is the best match.
        for dist in self[req.key]:
            if dist in req:
                return dist
        # try to download/install
        return self.obtain(req, installer)

    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)

        Obtain a distro that matches requirement (e.g. via download). In the
        base ``Environment`` class, this routine just returns
        ``installer(requirement)``, unless `installer` is None, in which case
        None is returned instead. This method is a hook that allows subclasses
        to attempt other ways of obtaining a distribution before falling back
        to the `installer` argument."""
        if installer is not None:
            return installer(requirement)

    def __iter__(self):
        """Yield the unique project names of the available distributions"""
        for key in self._distmap.keys():
            # Skip keys whose lists were emptied by remove().
            if self[key]:
                yield key

    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        if isinstance(other, Distribution):
            self.add(other)
        elif isinstance(other, Environment):
            for project in other:
                for dist in other[project]:
                    self.add(dist)
        else:
            raise TypeError("Can't add %r to environment" % (other,))
        return self

    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        # The combined environment is unrestricted (platform/python None)
        # so it can hold every distribution from both operands.
        new = self.__class__([], platform=None, python=None)
        for env in self, other:
            new += env
        return new

1104 

1105 

# XXX backward compatibility: older releases exposed Environment under
# the name AvailableDistributions; keep the alias for legacy importers.
AvailableDistributions = Environment

1108 

1109 

class ExtractionError(RuntimeError):
    """Raised when resource extraction fails.

    Instances of this exception expose the following attributes:

    manager
        The resource manager that raised this exception

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    """

1124 

1125 

class ResourceManager:
    """Manage resource extraction and packages"""
    # Base directory for extractions; None means use get_default_cache().
    extraction_path = None

    def __init__(self):
        # Tracks every path handed out by get_cache_path(), for cleanup.
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        return get_provider(package_or_requirement).resource_isdir(
            resource_name
        )

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        return get_provider(package_or_requirement).get_resource_filename(
            self, resource_name
        )

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        return get_provider(package_or_requirement).get_resource_stream(
            self, resource_name
        )

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        return get_provider(package_or_requirement).get_resource_string(
            self, resource_name
        )

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        return get_provider(package_or_requirement).resource_listdir(
            resource_name
        )

    def extraction_error(self):
        """Give an error message for problems extracting file(s)"""

        # Must be called from inside an `except` block: re-raises the
        # active exception wrapped in a user-friendly ExtractionError.
        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()

        tmpl = textwrap.dedent("""
            Can't extract file(s) to egg cache

            The following error occurred while trying to extract file(s)
            to the Python egg cache:

              {old_exc}

            The Python egg cache directory is currently set to:

              {cache_path}

            Perhaps your account does not have write access to this directory?
            You can change the cache directory by setting the PYTHON_EGG_CACHE
            environment variable to point to an accessible directory.
            """).lstrip()
        err = ExtractionError(tmpl.format(**locals()))
        err.manager = self
        err.cache_path = cache_path
        err.original_error = old_exc
        raise err

    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist. `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension. `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        """
        extract_path = self.extraction_path or get_default_cache()
        target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
        try:
            _bypass_ensure_directory(target_path)
        except Exception:
            # Convert any directory-creation failure into ExtractionError.
            self.extraction_error()

        self._warn_unsafe_extraction_path(extract_path)

        self.cached_files[target_path] = 1
        return target_path

    @staticmethod
    def _warn_unsafe_extraction_path(path):
        """
        If the default extraction path is overridden and set to an insecure
        location, such as /tmp, it opens up an opportunity for an attacker to
        replace an extracted file with an unauthorized payload. Warn the user
        if a known insecure location is used.

        See Distribute #375 for more details.
        """
        if os.name == 'nt' and not path.startswith(os.environ['windir']):
            # On Windows, permissions are generally restrictive by default
            # and temp directories are not writable by other users, so
            # bypass the warning.
            return
        mode = os.stat(path).st_mode
        if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
            msg = (
                "%s is writable by group/others and vulnerable to attack "
                "when "
                "used with get_resource_filename. Consider a more secure "
                "location (set with .set_extraction_path or the "
                "PYTHON_EGG_CACHE environment variable)." % path
            )
            warnings.warn(msg, UserWarning)

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource. They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """

        if os.name == 'posix':
            # Make the resource executable
            mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
            os.chmod(tempname, mode)

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``. (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks. See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``. You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done. There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        """
        if self.cached_files:
            raise ValueError(
                "Can't change extraction path, files already extracted"
            )

        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process. This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # XXX
        # NOTE(review): intentionally unimplemented upstream — removes
        # nothing and implicitly returns None, not the documented list.

1304 

1305 

def get_default_cache():
    """
    Return the ``PYTHON_EGG_CACHE`` environment variable
    or a platform-relevant user cache dir for an app
    named "Python-Eggs".
    """
    env_cache = os.environ.get('PYTHON_EGG_CACHE')
    if env_cache:
        return env_cache
    # Fall back to the conventional per-user cache location.
    return appdirs.user_cache_dir(appname='Python-Eggs')

1316 

1317 

def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Runs of characters other than ASCII letters, digits, and '.' collapse
    to a single '-'.
    """
    unsafe = re.compile('[^A-Za-z0-9.]+')
    return unsafe.sub('-', name)

1324 

1325 

def safe_version(version):
    """
    Convert an arbitrary string to a standard version string
    """
    try:
        # Prefer PEP 440 normalization when the version parses cleanly.
        parsed = packaging.version.Version(version)
    except packaging.version.InvalidVersion:
        # Fall back to a lossy escape: spaces become dots, and any other
        # run of unsafe characters collapses to a single dash.
        return re.sub('[^A-Za-z0-9.]+', '-', version.replace(' ', '.'))
    return str(parsed)

1336 

1337 

def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    """
    normalized = re.sub('[^A-Za-z0-9.-]+', '_', extra)
    return normalized.lower()

1345 

1346 

def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    # Dashes are reserved as field separators in egg file names.
    return '_'.join(name.split('-'))

1353 

1354 

def invalid_marker(text):
    """
    Validate text as a PEP 508 environment marker; return an exception
    if invalid or False otherwise.
    """
    try:
        evaluate_marker(text)
    except SyntaxError as exc:
        # Strip location details that are meaningless for a marker string.
        exc.filename = None
        exc.lineno = None
        return exc
    else:
        return False

1367 

1368 

def evaluate_marker(text, extra=None):
    """
    Evaluate a PEP 508 environment marker.
    Return a boolean indicating the marker result in this environment.
    Raise SyntaxError if marker is invalid.

    This implementation uses the 'pyparsing' module.
    """
    try:
        # Present any parse failure uniformly as SyntaxError
        # (see invalid_marker).
        return packaging.markers.Marker(text).evaluate()
    except packaging.markers.InvalidMarker as err:
        raise SyntaxError(err)

1382 

1383 

class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

    # Set by EggProvider._setup_prefix when the module lives inside an egg.
    egg_name = None
    egg_info = None
    loader = None

    def __init__(self, module):
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))

    def get_resource_filename(self, manager, resource_name):
        return self._fn(self.module_path, resource_name)

    def get_resource_stream(self, manager, resource_name):
        return io.BytesIO(self.get_resource_string(manager, resource_name))

    def get_resource_string(self, manager, resource_name):
        return self._get(self._fn(self.module_path, resource_name))

    def has_resource(self, resource_name):
        return self._has(self._fn(self.module_path, resource_name))

    def _get_metadata_path(self, name):
        return self._fn(self.egg_info, name)

    def has_metadata(self, name):
        if not self.egg_info:
            # NOTE(review): returns the falsy egg_info itself (None/''),
            # not False; callers appear to rely only on truthiness.
            return self.egg_info

        path = self._get_metadata_path(name)
        return self._has(path)

    def get_metadata(self, name):
        if not self.egg_info:
            return ""
        path = self._get_metadata_path(name)
        value = self._get(path)
        if six.PY2:
            return value
        try:
            return value.decode('utf-8')
        except UnicodeDecodeError as exc:
            # Include the path in the error message to simplify
            # troubleshooting, and without changing the exception type.
            exc.reason += ' in {} file at path: {}'.format(name, path)
            raise

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def resource_isdir(self, resource_name):
        return self._isdir(self._fn(self.module_path, resource_name))

    def metadata_isdir(self, name):
        return self.egg_info and self._isdir(self._fn(self.egg_info, name))

    def resource_listdir(self, resource_name):
        return self._listdir(self._fn(self.module_path, resource_name))

    def metadata_listdir(self, name):
        if self.egg_info:
            return self._listdir(self._fn(self.egg_info, name))
        return []

    def run_script(self, script_name, namespace):
        """Execute the named metadata script inside `namespace`."""
        script = 'scripts/' + script_name
        if not self.has_metadata(script):
            raise ResolutionError(
                "Script {script!r} not found in metadata at {self.egg_info!r}"
                .format(**locals()),
            )
        script_text = self.get_metadata(script).replace('\r\n', '\n')
        script_text = script_text.replace('\r', '\n')
        script_filename = self._fn(self.egg_info, script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            # Prefer executing the real file so tracebacks show live source.
            source = open(script_filename).read()
            code = compile(source, script_filename, 'exec')
            exec(code, namespace, namespace)
        else:
            # No real file (e.g. zipped egg): seed linecache so tracebacks
            # can still display the script's source lines.
            from linecache import cache
            cache[script_filename] = (
                len(script_text), 0, script_text.split('\n'), script_filename
            )
            script_code = compile(script_text, script_filename, 'exec')
            exec(script_code, namespace, namespace)

    def _has(self, path):
        # Subclasses override with a loader-specific existence check.
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _isdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _listdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _fn(self, base, resource_name):
        # Map a '/'-separated resource name to a path under `base`.
        self._validate_resource_path(resource_name)
        if resource_name:
            return os.path.join(base, *resource_name.split('/'))
        return base

    @staticmethod
    def _validate_resource_path(path):
        """
        Validate the resource paths according to the docs.
        https://setuptools.readthedocs.io/en/latest/pkg_resources.html#basic-resource-access

        >>> warned = getfixture('recwarn')
        >>> warnings.simplefilter('always')
        >>> vrp = NullProvider._validate_resource_path
        >>> vrp('foo/bar.txt')
        >>> bool(warned)
        False
        >>> vrp('../foo/bar.txt')
        >>> bool(warned)
        True
        >>> warned.clear()
        >>> vrp('/foo/bar.txt')
        >>> bool(warned)
        True
        >>> vrp('foo/../../bar.txt')
        >>> bool(warned)
        True
        >>> warned.clear()
        >>> vrp('foo/f../bar.txt')
        >>> bool(warned)
        False

        Windows path separators are straight-up disallowed.
        >>> vrp(r'\\foo/bar.txt')
        Traceback (most recent call last):
        ...
        ValueError: Use of .. or absolute path in a resource path \
is not allowed.

        >>> vrp(r'C:\\foo/bar.txt')
        Traceback (most recent call last):
        ...
        ValueError: Use of .. or absolute path in a resource path \
is not allowed.

        Blank values are allowed

        >>> vrp('')
        >>> bool(warned)
        False

        Non-string values are not.

        >>> vrp(None)
        Traceback (most recent call last):
        ...
        AttributeError: ...
        """
        invalid = (
            os.path.pardir in path.split(posixpath.sep) or
            posixpath.isabs(path) or
            ntpath.isabs(path)
        )
        if not invalid:
            return

        msg = "Use of .. or absolute path in a resource path is not allowed."

        # Aggressively disallow Windows absolute paths
        if ntpath.isabs(path) and not posixpath.isabs(path):
            raise ValueError(msg)

        # for compatibility, warn; in future
        # raise ValueError(msg)
        warnings.warn(
            msg[:-1] + " and will raise exceptions in a future release.",
            DeprecationWarning,
            stacklevel=4,
        )

    def _get(self, path):
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
        )

1574 

1575 

# Fallback registration: any otherwise-unregistered loader type gets
# the minimal NullProvider implementation.
register_loader_type(object, NullProvider)

1577 

1578 

class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self, module):
        NullProvider.__init__(self, module)
        self._setup_prefix()

    def _setup_prefix(self):
        # Metadata may live in an egg nested inside a "basket" of multiple
        # eggs, so walk up from module_path (rather than .archive) looking
        # for the innermost enclosing *.egg directory.
        candidate = self.module_path
        previous = None
        while candidate != previous:
            if _is_egg_path(candidate):
                self.egg_name = os.path.basename(candidate)
                self.egg_info = os.path.join(candidate, 'EGG-INFO')
                self.egg_root = candidate
                return
            previous = candidate
            candidate, _ = os.path.split(candidate)

1599 

1600 

class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    # Simple pass-throughs to the real filesystem.
    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self, path):
        return os.path.isdir(path)

    def _listdir(self, path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        """Open the named resource as a binary file object."""
        full_path = self._fn(self.module_path, resource_name)
        return open(full_path, 'rb')

    def _get(self, path):
        """Read and return the raw bytes of `path`."""
        with open(path, 'rb') as handle:
            return handle.read()

    @classmethod
    def _register(cls):
        # Associate the standard file-based loaders with this provider;
        # missing loader names resolve to type(None) and are harmless.
        for loader_name in ('SourceFileLoader', 'SourcelessFileLoader'):
            loader_cls = getattr(importlib_machinery, loader_name, type(None))
            register_loader_type(loader_cls, cls)

1626 

1627 

# Register DefaultProvider for the standard filesystem-based loaders.
DefaultProvider._register()

1629 

1630 

class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    module_path = None

    def _has(self, path):
        return False

    def _isdir(self, path):
        return False

    def _get(self, path):
        return ''

    def _listdir(self, path):
        return []

    def __init__(self):
        # Deliberately skip NullProvider.__init__: there is no module.
        pass

1646 

1647 

# Shared singleton used wherever a no-op provider is needed.
empty_provider = EmptyProvider()

1649 

1650 

class ZipManifests(dict):
    """
    zip manifest builder
    """

    @classmethod
    def build(cls, path):
        """
        Build a dictionary similar to the zipimport directory
        caches, except instead of tuples, store ZipInfo objects.

        Use a platform-specific path separator (os.sep) for the path keys
        for compatibility with pypy on Windows.
        """
        with zipfile.ZipFile(path) as archive:
            return {
                name.replace('/', os.sep): archive.getinfo(name)
                for name in archive.namelist()
            }

    load = build

1676 

1677 

class MemoizedZipManifests(ZipManifests):
    """
    Memoized zipfile manifests.
    """
    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')

    def load(self, path):
        """
        Load a manifest at path or return a suitable manifest already loaded.
        """
        path = os.path.normpath(path)
        mtime = os.stat(path).st_mtime

        cached = self.get(path)
        if cached is None or cached.mtime != mtime:
            # (Re)build when never seen or when the zip changed on disk.
            cached = self.manifest_mod(self.build(path), mtime)
            self[path] = cached

        return cached.manifest

1696 

1697 

class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""

    eagers = None
    # Shared, memoized manifest cache for all ZipProvider instances.
    _zip_manifests = MemoizedZipManifests()

    def __init__(self, module):
        EggProvider.__init__(self, module)
        self.zip_pre = self.loader.archive + os.sep

    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        fspath = fspath.rstrip(os.sep)
        if fspath == self.loader.archive:
            return ''
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre):]
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.zip_pre)
        )

    def _parts(self, zip_path):
        # Convert a zipfile subpath into an egg-relative path part list.
        # pseudo-fs path
        fspath = self.zip_pre + zip_path
        if fspath.startswith(self.egg_root + os.sep):
            return fspath[len(self.egg_root) + 1:].split(os.sep)
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.egg_root)
        )

    @property
    def zipinfo(self):
        return self._zip_manifests.load(self.loader.archive)

    def get_resource_filename(self, manager, resource_name):
        if not self.egg_name:
            raise NotImplementedError(
                "resource_filename() only supported for .egg, not .zip"
            )
        # no need to lock for extraction, since we use temp names
        zip_path = self._resource_to_zip(resource_name)
        eagers = self._get_eager_resources()
        if '/'.join(self._parts(zip_path)) in eagers:
            # An eager resource forces extraction of the whole eager set.
            for name in eagers:
                self._extract_resource(manager, self._eager_to_zip(name))
        return self._extract_resource(manager, zip_path)

    @staticmethod
    def _get_date_and_size(zip_stat):
        size = zip_stat.file_size
        # ymdhms+wday, yday, dst
        date_time = zip_stat.date_time + (0, 0, -1)
        # 1980 offset already done
        timestamp = time.mktime(date_time)
        return timestamp, size

    def _extract_resource(self, manager, zip_path):

        # Directories are extracted by recursing over their contents.
        if zip_path in self._index():
            for name in self._index()[zip_path]:
                last = self._extract_resource(
                    manager, os.path.join(zip_path, name)
                )
            # return the extracted directory name
            return os.path.dirname(last)

        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])

        if not WRITE_SUPPORT:
            raise IOError('"os.rename" and "os.unlink" are not supported '
                          'on this platform')
        try:

            real_path = manager.get_cache_path(
                self.egg_name, self._parts(zip_path)
            )

            if self._is_current(real_path, zip_path):
                return real_path

            # Write to a temp name, then rename into place, so concurrent
            # extractors never observe a partially written file.
            outf, tmpnam = _mkstemp(
                ".$extract",
                dir=os.path.dirname(real_path),
            )
            os.write(outf, self.loader.get_data(zip_path))
            os.close(outf)
            utime(tmpnam, (timestamp, timestamp))
            manager.postprocess(tmpnam, real_path)

            try:
                rename(tmpnam, real_path)

            except os.error:
                if os.path.isfile(real_path):
                    if self._is_current(real_path, zip_path):
                        # the file became current since it was checked above,
                        # so proceed.
                        return real_path
                    # Windows, del old file and retry
                    elif os.name == 'nt':
                        unlink(real_path)
                        rename(tmpnam, real_path)
                        return real_path
                raise

        except os.error:
            # report a user-friendly error
            manager.extraction_error()

        return real_path

    def _is_current(self, file_path, zip_path):
        """
        Return True if the file_path is current for this zip_path
        """
        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
        if not os.path.isfile(file_path):
            return False
        stat = os.stat(file_path)
        if stat.st_size != size or stat.st_mtime != timestamp:
            return False
        # check that the contents match
        zip_contents = self.loader.get_data(zip_path)
        with open(file_path, 'rb') as f:
            file_contents = f.read()
        return zip_contents == file_contents

    def _get_eager_resources(self):
        # Lazily collect resource names that must all be extracted together.
        if self.eagers is None:
            eagers = []
            for name in ('native_libs.txt', 'eager_resources.txt'):
                if self.has_metadata(name):
                    eagers.extend(self.get_metadata_lines(name))
            self.eagers = eagers
        return self.eagers

    def _index(self):
        # Build (once, cached on self) a mapping of each directory subpath
        # to the names of its immediate children.
        try:
            return self._dirindex
        except AttributeError:
            ind = {}
            for path in self.zipinfo:
                parts = path.split(os.sep)
                while parts:
                    parent = os.sep.join(parts[:-1])
                    if parent in ind:
                        ind[parent].append(parts[-1])
                        break
                    else:
                        ind[parent] = [parts.pop()]
            self._dirindex = ind
            return ind

    def _has(self, fspath):
        zip_path = self._zipinfo_name(fspath)
        return zip_path in self.zipinfo or zip_path in self._index()

    def _isdir(self, fspath):
        return self._zipinfo_name(fspath) in self._index()

    def _listdir(self, fspath):
        return list(self._index().get(self._zipinfo_name(fspath), ()))

    def _eager_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.egg_root, resource_name))

    def _resource_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.module_path, resource_name))

1868 

1869 

# Zipped eggs/archives imported via zipimport use ZipProvider.
register_loader_type(zipimport.zipimporter, ZipProvider)

1871 

1872 

class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    """

    def __init__(self, path):
        self.path = path

    def _get_metadata_path(self, name):
        # Every metadata name resolves to the single backing file.
        return self.path

    def has_metadata(self, name):
        if name != 'PKG-INFO':
            return False
        return os.path.isfile(self.path)

    def get_metadata(self, name):
        if name != 'PKG-INFO':
            raise KeyError("No metadata except PKG-INFO is available")

        with io.open(self.path, encoding='utf-8', errors="replace") as f:
            metadata = f.read()
        self._warn_on_replacement(metadata)
        return metadata

    def _warn_on_replacement(self, metadata):
        # Python 2.7 compat for: replacement_char = '�'
        replacement_char = b'\xef\xbf\xbd'.decode('utf-8')
        if replacement_char in metadata:
            tmpl = "{self.path} could not be properly decoded in UTF-8"
            msg = tmpl.format(**locals())
            warnings.warn(msg)

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

1913 

1914 

class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(base_dir, project_name=dist_name, metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path, 'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        # path: the sys.path entry the distribution lives on;
        # egg_info: the metadata directory for that distribution.
        self.module_path = path
        self.egg_info = egg_info

1938 

1939 

class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""

        self.zip_pre = importer.archive + os.sep
        self.loader = importer
        prefix = importer.prefix
        if prefix:
            self.module_path = os.path.join(importer.archive, prefix)
        else:
            # No prefix: the archive root is the module path.
            self.module_path = importer.archive
        self._setup_prefix()

1953 

1954 

1955_declare_state('dict', _distribution_finders={}) 

1956 

1957 

def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `distribution_finder` is a callable that, passed a path
    item and the importer instance, yields ``Distribution`` instances found on
    that path item. See ``pkg_resources.find_on_path`` for an example."""
    # A later registration for the same importer type replaces the earlier one.
    _distribution_finders[importer_type] = distribution_finder

1966 

1967 

def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    importer = get_importer(path_item)
    # Select the registered finder that best matches this importer's type.
    finder = _find_adapter(_distribution_finders, importer)
    return finder(importer, path_item, only)

1973 

1974 

def find_eggs_in_zip(importer, path_item, only=False):
    """
    Find eggs in zip files; possibly multiple nested eggs.
    """
    if importer.archive.endswith('.whl'):
        # wheels are not supported with this finder
        # they don't have PKG-INFO metadata, and won't ever contain eggs
        return
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        # The zip itself is an egg distribution.
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        # don't yield nested distros
        return
    for subitem in metadata.resource_listdir(''):
        if _is_egg_path(subitem):
            # Recurse into eggs stored inside this archive.
            subpath = os.path.join(path_item, subitem)
            dists = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath)
            for dist in dists:
                yield dist
        elif subitem.lower().endswith('.dist-info'):
            subpath = os.path.join(path_item, subitem)
            submeta = EggMetadata(zipimport.zipimporter(subpath))
            submeta.egg_info = subpath
            yield Distribution.from_location(path_item, subitem, submeta)

2000 

2001 

2002register_finder(zipimport.zipimporter, find_eggs_in_zip) 

2003 

2004 

def find_nothing(importer, path_item, only=False):
    """Finder that never yields any distributions."""
    return ()

2007 

2008 

2009register_finder(object, find_nothing) 

2010 

2011 

def _by_version_descending(names):
    """
    Given a list of filenames, return them in descending order
    by version number.

    >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg'
    >>> _by_version_descending(names)
    ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg']
    """
    def _by_version(name):
        """
        Parse each dash-separated component of the filename as a version.
        """
        stem, ext = os.path.splitext(name)
        pieces = itertools.chain(stem.split('-'), [ext])
        return [packaging.version.parse(piece) for piece in pieces]

    return sorted(names, key=_by_version, reverse=True)

2036 

2037 

def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    path_item = _normalize_cached(path_item)

    if _is_unpacked_egg(path_item):
        # The path entry itself is an unpacked egg; it is the only
        # distribution this entry can yield.
        yield Distribution.from_filename(
            path_item, metadata=PathMetadata(
                path_item, os.path.join(path_item, 'EGG-INFO')
            )
        )
        return

    entries = safe_listdir(path_item)

    # for performance, before sorting by version,
    # screen entries for only those that will yield
    # distributions
    filtered = (
        entry
        for entry in entries
        if dist_factory(path_item, entry, only)
    )

    # scan for .egg and .egg-info in directory
    path_item_entries = _by_version_descending(filtered)
    for entry in path_item_entries:
        fullpath = os.path.join(path_item, entry)
        factory = dist_factory(path_item, entry, only)
        for dist in factory(fullpath):
            yield dist

2068 

2069 

def dist_factory(path_item, entry, only):
    """
    Return a dist_factory for a path_item and entry
    """
    lower = entry.lower()
    if lower.endswith(('.egg-info', '.dist-info')):
        # Metadata directories/files are handled directly.
        return distributions_from_metadata
    if not only and _is_egg_path(entry):
        return find_distributions
    if not only and lower.endswith('.egg-link'):
        return resolve_egg_link
    # Falsy callable: no distributions for this entry.
    return NoDists()

2085 

2086 

class NoDists:
    """
    >>> bool(NoDists())
    False

    >>> list(NoDists()('anything'))
    []
    """
    def __bool__(self):
        # Falsy so dist_factory results can be used to screen entries cheaply.
        return False
    if six.PY2:
        __nonzero__ = __bool__

    def __call__(self, fullpath):
        # Yield no distributions, whatever the path.
        return iter(())

2102 

2103 

def safe_listdir(path):
    """
    List the contents of *path*, returning () instead of raising when the
    path is missing, is not a directory, or is unreadable.
    """
    try:
        return os.listdir(path)
    except (PermissionError, NotADirectoryError):
        return ()
    except OSError as exc:
        # Missing / non-directory / permission problems simply produce an
        # empty listing; anything else propagates.
        expected = (errno.ENOTDIR, errno.EACCES, errno.ENOENT)
        # Python 2 on Windows needs to be handled this way :(
        if exc.errno in expected or getattr(exc, "winerror", None) == 267:
            return ()
        raise

2123 

2124 

def distributions_from_metadata(path):
    """
    Yield a Distribution for the metadata found at *path*.

    *path* may be a metadata directory (e.g. ``*.egg-info``) or a standalone
    metadata file; an empty metadata directory yields nothing.
    """
    root = os.path.dirname(path)
    if os.path.isdir(path):
        if not os.listdir(path):
            # empty metadata dir; skip
            return
        metadata = PathMetadata(root, path)
    else:
        metadata = FileMetadata(path)
    entry = os.path.basename(path)
    yield Distribution.from_location(
        root, entry, metadata, precedence=DEVELOP_DIST,
    )

2138 

2139 

def non_empty_lines(path):
    """
    Yield the stripped, non-blank lines of the file at *path*.
    """
    with open(path) as handle:
        for raw in handle:
            text = raw.strip()
            if not text:
                continue
            yield text

2149 

2150 

def resolve_egg_link(path):
    """
    Given a path to an .egg-link, resolve distributions
    present in the referenced path.
    """
    base = os.path.dirname(path)
    # Each non-empty line of the .egg-link names a path relative to it.
    resolved_paths = (
        os.path.join(base, ref)
        for ref in non_empty_lines(path)
    )
    dist_groups = map(find_distributions, resolved_paths)
    # Only the first referenced path contributes distributions.
    return next(dist_groups, ())

2163 

2164 

register_finder(pkgutil.ImpImporter, find_on_path)

# Guard for Pythons where importlib_machinery lacks FileFinder.
if hasattr(importlib_machinery, 'FileFinder'):
    register_finder(importlib_machinery.FileFinder, find_on_path)

2169 

# Reload-safe registries used by the namespace-package machinery below.
_declare_state('dict', _namespace_handlers={})
_declare_state('dict', _namespace_packages={})

2172 

2173 

def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `namespace_handler` is a callable like this::

        def namespace_handler(importer, path_entry, moduleName, module):
            # return a path_entry to use for child packages

    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath. For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    """
    # A later registration for the same importer type replaces the earlier one.
    _namespace_handlers[importer_type] = namespace_handler

2190 

2191 

def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)"""

    importer = get_importer(path_item)
    if importer is None:
        return None

    # capture warnings due to #1111
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        loader = importer.find_module(packageName)

    if loader is None:
        return None
    module = sys.modules.get(packageName)
    if module is None:
        # Materialize an empty package module so a subpath can be attached.
        module = sys.modules[packageName] = types.ModuleType(packageName)
        module.__path__ = []
        _set_parent_ns(packageName)
    elif not hasattr(module, '__path__'):
        raise TypeError("Not a package:", packageName)
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer, path_item, packageName, module)
    if subpath is not None:
        # Append the new subpath, import the package, then re-order
        # __path__ to match sys.path ordering.
        path = module.__path__
        path.append(subpath)
        loader.load_module(packageName)
        _rebuild_mod_path(path, packageName, module)
    return subpath

2221 

2222 

def _rebuild_mod_path(orig_path, package_name, module):
    """
    Rebuild module.__path__ ensuring that all entries are ordered
    corresponding to their sys.path order
    """
    sys_path = [_normalize_cached(p) for p in sys.path]

    def safe_sys_path_index(entry):
        """
        Workaround for #520 and #513.
        """
        try:
            return sys_path.index(entry)
        except ValueError:
            # Entries not on sys.path sort after all sys.path entries.
            return float('inf')

    def position_in_sys_path(path):
        """
        Return the ordinal of the path based on its position in sys.path
        """
        path_parts = path.split(os.sep)
        module_parts = package_name.count('.') + 1
        # Strip the package's own directory components to recover the
        # sys.path entry the subpath came from.
        parts = path_parts[:-module_parts]
        return safe_sys_path_index(_normalize_cached(os.sep.join(parts)))

    new_path = sorted(orig_path, key=position_in_sys_path)
    new_path = [_normalize_cached(p) for p in new_path]

    if isinstance(module.__path__, list):
        # Mutate in place so existing references to __path__ see the update.
        module.__path__[:] = new_path
    else:
        module.__path__ = new_path

2255 

2256 

def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package"""

    _imp.acquire_lock()
    try:
        if packageName in _namespace_packages:
            # Already declared; nothing to do.
            return

        path = sys.path
        parent, _, _ = packageName.rpartition('.')

        if parent:
            # Recursively declare (and if necessary import) ancestors first,
            # then search the parent's __path__ rather than sys.path.
            declare_namespace(parent)
            if parent not in _namespace_packages:
                __import__(parent)
            try:
                path = sys.modules[parent].__path__
            except AttributeError:
                raise TypeError("Not a package:", parent)

        # Track what packages are namespaces, so when new path items are added,
        # they can be updated
        _namespace_packages.setdefault(parent or None, []).append(packageName)
        _namespace_packages.setdefault(packageName, [])

        for path_item in path:
            # Ensure all the parent's path items are reflected in the child,
            # if they apply
            _handle_ns(packageName, path_item)

    finally:
        _imp.release_lock()

2289 

2290 

def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    _imp.acquire_lock()
    try:
        for package in _namespace_packages.get(parent, ()):
            subpath = _handle_ns(package, path_item)
            if subpath:
                # Recurse so nested namespace packages pick up the new
                # subpath as well.
                fixup_namespace_packages(subpath, package)
    finally:
        _imp.release_lock()

2301 

2302 

def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""

    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    already_present = any(
        _normalize_cached(item) == normalized
        for item in module.__path__
    )
    # Only return the path if it's not already there
    if not already_present:
        return subpath

2314 

2315 

register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
register_namespace_handler(zipimport.zipimporter, file_ns_handler)

# Guard for Pythons where importlib_machinery lacks FileFinder.
if hasattr(importlib_machinery, 'FileFinder'):
    register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)

2321 

2322 

def null_ns_handler(importer, path_item, packageName, module):
    """Fallback namespace handler: contributes no subpath."""
    return None

2325 

2326 

2327register_namespace_handler(object, null_ns_handler) 

2328 

2329 

def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes"""
    patched = _cygwin_patch(filename)
    resolved = os.path.realpath(os.path.normpath(patched))
    return os.path.normcase(resolved)

2334 

2335 

2336def _cygwin_patch(filename): # pragma: nocover 

2337 """ 

2338 Contrary to POSIX 2008, on Cygwin, getcwd (3) contains 

2339 symlink components. Using 

2340 os.path.abspath() works around this limitation. A fix in os.getcwd() 

2341 would probably better, in Cygwin even more so, except 

2342 that this seems to be by design... 

2343 """ 

2344 return os.path.abspath(filename) if sys.platform == 'cygwin' else filename 

2345 

2346 

def _normalize_cached(filename, _cache={}):
    # NOTE: the mutable default argument is intentional -- it is the
    # process-wide memoization cache for normalize_path().
    if filename not in _cache:
        _cache[filename] = normalize_path(filename)
    return _cache[filename]

2353 

2354 

2355def _is_egg_path(path): 

2356 """ 

2357 Determine if given path appears to be an egg. 

2358 """ 

2359 return path.lower().endswith('.egg') 

2360 

2361 

def _is_unpacked_egg(path):
    """
    Determine if given path appears to be an unpacked egg.
    """
    if not _is_egg_path(path):
        return False
    # An unpacked egg is a directory containing EGG-INFO/PKG-INFO.
    pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
    return os.path.isfile(pkg_info)

2370 

2371 

2372def _set_parent_ns(packageName): 

2373 parts = packageName.split('.') 

2374 name = parts.pop() 

2375 if parts: 

2376 parent = '.'.join(parts) 

2377 setattr(sys.modules[parent], name, sys.modules[packageName]) 

2378 

2379 

def yield_lines(strs):
    """Yield non-empty/non-comment lines of a string or sequence"""
    if isinstance(strs, six.string_types):
        for raw in strs.splitlines():
            line = raw.strip()
            # skip blank lines/comments
            if not line or line.startswith('#'):
                continue
            yield line
    else:
        # A sequence of strings (or nested sequences): recurse into each.
        for item in strs:
            for line in yield_lines(item):
                yield line

2392 

2393 

# Matches dotted module names, e.g. "foo.bar.baz".
MODULE = re.compile(r"\w+(\.\w+)*$").match
# Parses egg filenames of the form name[-version[-pyX.Y[-platform]]].
EGG_NAME = re.compile(
    r"""
    (?P<name>[^-]+) (
        -(?P<ver>[^-]+) (
            -py(?P<pyver>[^-]+) (
                -(?P<plat>.+)
            )?
        )?
    )?
    """,
    re.VERBOSE | re.IGNORECASE,
).match

2407 

2408 

class EntryPoint:
    """Object representing an advertised importable object"""

    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.name = name
        self.module_name = module_name
        # Dotted attribute path within the module, stored as a tuple.
        self.attrs = tuple(attrs)
        # Extras that must be required before this entry point resolves.
        self.extras = tuple(extras)
        self.dist = dist

    def __str__(self):
        # Render back to the source syntax: "name = module:attrs [extras]".
        s = "%s = %s" % (self.name, self.module_name)
        if self.attrs:
            s += ':' + '.'.join(self.attrs)
        if self.extras:
            s += ' [%s]' % ','.join(self.extras)
        return s

    def __repr__(self):
        return "EntryPoint.parse(%r)" % str(self)

    def load(self, require=True, *args, **kwargs):
        """
        Require packages for this EntryPoint, then resolve it.
        """
        if not require or args or kwargs:
            warnings.warn(
                "Parameters to load are deprecated. Call .resolve and "
                ".require separately.",
                PkgResourcesDeprecationWarning,
                stacklevel=2,
            )
        if require:
            self.require(*args, **kwargs)
        return self.resolve()

    def resolve(self):
        """
        Resolve the entry point from its module and attrs.
        """
        module = __import__(self.module_name, fromlist=['__name__'], level=0)
        try:
            # Walk the dotted attribute path down from the module object.
            return functools.reduce(getattr, self.attrs, module)
        except AttributeError as exc:
            raise ImportError(str(exc))

    def require(self, env=None, installer=None):
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)

        # Get the requirements for this entry point with all its extras and
        # then resolve them. We have to pass `extras` along when resolving so
        # that the working set knows what extras we want. Otherwise, for
        # dist-info distributions, the working set will assume that the
        # requirements for that extra are purely optional and skip over them.
        reqs = self.dist.requires(self.extras)
        items = working_set.resolve(reqs, env, installer, extras=self.extras)
        list(map(working_set.add, items))

    # Grammar for a single entry-point line: name = module[:attrs] [extras]
    pattern = re.compile(
        r'\s*'
        r'(?P<name>.+?)\s*'
        r'=\s*'
        r'(?P<module>[\w.]+)\s*'
        r'(:\s*(?P<attr>[\w.]+))?\s*'
        r'(?P<extras>\[.*\])?\s*$'
    )

    @classmethod
    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1, extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        m = cls.pattern.match(src)
        if not m:
            msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
            raise ValueError(msg, src)
        res = m.groupdict()
        extras = cls._parse_extras(res['extras'])
        attrs = res['attr'].split('.') if res['attr'] else ()
        return cls(res['name'], res['module'], attrs, extras, dist)

    @classmethod
    def _parse_extras(cls, extras_spec):
        if not extras_spec:
            return ()
        # Reuse Requirement parsing; a version specifier here is invalid.
        req = Requirement.parse('x' + extras_spec)
        if req.specs:
            raise ValueError()
        return req.extras

    @classmethod
    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        this = {}
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            if ep.name in this:
                raise ValueError("Duplicate entry point", group, ep.name)
            this[ep.name] = ep
        return this

    @classmethod
    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        if isinstance(data, dict):
            data = data.items()
        else:
            data = split_sections(data)
        maps = {}
        for group, lines in data:
            if group is None:
                # Lines before any [group] header are only legal if blank.
                if not lines:
                    continue
                raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            if group in maps:
                raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)
        return maps

2539 

2540 

2541def _remove_md5_fragment(location): 

2542 if not location: 

2543 return '' 

2544 parsed = urllib.parse.urlparse(location) 

2545 if parsed[-1].startswith('md5='): 

2546 return urllib.parse.urlunparse(parsed[:-1] + ('',)) 

2547 return location 

2548 

2549 

def _version_from_file(lines):
    """
    Given an iterable of lines from a Metadata file, return
    the value of the Version field, if present, or None otherwise.
    """
    version_lines = (
        line for line in lines
        if line.lower().startswith('version:')
    )
    # Take the first Version: line; fall back to an empty string.
    line = next(version_lines, '')
    _, _, value = line.partition(':')
    return safe_version(value.strip()) or None

2561 

2562 

2563class Distribution: 

2564 """Wrap an actual or potential sys.path entry w/metadata""" 

2565 PKG_INFO = 'PKG-INFO' 

2566 

    def __init__(
            self, location=None, metadata=None, project_name=None,
            version=None, py_version=PY_MAJOR, platform=None,
            precedence=EGG_DIST):
        self.project_name = safe_name(project_name or 'Unknown')
        if version is not None:
            # Only set when supplied; otherwise the `version` property
            # lazily reads it from metadata.
            self._version = safe_version(version)
        self.py_version = py_version
        self.platform = platform
        self.location = location
        self.precedence = precedence
        # Metadata provider backing this distribution; empty by default.
        self._provider = metadata or empty_provider

2579 

    @classmethod
    def from_location(cls, location, basename, metadata=None, **kw):
        """Build a Distribution, dispatching on the basename's extension."""
        project_name, version, py_version, platform = [None] * 4
        basename, ext = os.path.splitext(basename)
        if ext.lower() in _distributionImpl:
            # Known extensions select a specialized Distribution subclass.
            cls = _distributionImpl[ext.lower()]

            match = EGG_NAME(basename)
            if match:
                # Extract name/version/python/platform from the egg filename.
                project_name, version, py_version, platform = match.group(
                    'name', 'ver', 'pyver', 'plat'
                )
        return cls(
            location, metadata, project_name=project_name, version=version,
            py_version=py_version, platform=platform, **kw
        )._reload_version()

2596 

    def _reload_version(self):
        # No-op here; exists so subclasses can refresh version info after
        # construction (called from from_location).
        return self

2599 

    @property
    def hashcmp(self):
        # Key tuple used for hashing and all ordering comparisons below.
        return (
            self.parsed_version,
            self.precedence,
            self.key,
            _remove_md5_fragment(self.location),
            self.py_version or '',
            self.platform or '',
        )

2610 

    def __hash__(self):
        # Hash follows the same key as the rich comparisons.
        return hash(self.hashcmp)

2613 

    def __lt__(self, other):
        # Ordering is defined entirely by the hashcmp tuple.
        return self.hashcmp < other.hashcmp

2616 

    def __le__(self, other):
        # Ordering is defined entirely by the hashcmp tuple.
        return self.hashcmp <= other.hashcmp

2619 

    def __gt__(self, other):
        # Ordering is defined entirely by the hashcmp tuple.
        return self.hashcmp > other.hashcmp

2622 

    def __ge__(self, other):
        # Ordering is defined entirely by the hashcmp tuple.
        return self.hashcmp >= other.hashcmp

2625 

2626 def __eq__(self, other): 

2627 if not isinstance(other, self.__class__): 

2628 # It's not a Distribution, so they are not equal 

2629 return False 

2630 return self.hashcmp == other.hashcmp 

2631 

    def __ne__(self, other):
        # Inverse of __eq__ (defined explicitly for Python 2 compatibility).
        return not self == other

2634 

2635 # These properties have to be lazy so that we don't have to load any 

2636 # metadata until/unless it's actually needed. (i.e., some distributions 

2637 # may not know their name or version without loading PKG-INFO) 

2638 

    @property
    def key(self):
        try:
            return self._key
        except AttributeError:
            # Cache the lowercased project name on first access.
            self._key = key = self.project_name.lower()
            return key

2646 

    @property
    def parsed_version(self):
        if not hasattr(self, "_parsed_version"):
            # Parse lazily; self.version may itself need to load metadata.
            self._parsed_version = parse_version(self.version)

        return self._parsed_version

2653 

    def _warn_legacy_version(self):
        LV = packaging.version.LegacyVersion
        is_legacy = isinstance(self._parsed_version, LV)
        if not is_legacy:
            return

        # While an empty version is technically a legacy version and
        # is not a valid PEP 440 version, it's also unlikely to
        # actually come from someone and instead it is more likely that
        # it comes from setuptools attempting to parse a filename and
        # including it in the list. So for that we'll gate this warning
        # on if the version is anything at all or not.
        if not self.version:
            return

        tmpl = textwrap.dedent("""
            '{project_name} ({version})' is being parsed as a legacy,
            non PEP 440,
            version. You may find odd behavior and sort order.
            In particular it will be sorted as less than 0.0. It
            is recommended to migrate to PEP 440 compatible
            versions.
            """).strip().replace('\n', ' ')

        warnings.warn(tmpl.format(**vars(self)), PEP440Warning)

2679 

    @property
    def version(self):
        try:
            return self._version
        except AttributeError:
            # Not cached yet: read it from PKG-INFO metadata.
            version = self._get_version()
            if version is None:
                # Include the metadata path in the error to aid debugging.
                path = self._get_metadata_path_for_display(self.PKG_INFO)
                msg = (
                    "Missing 'Version:' header and/or {} file at path: {}"
                ).format(self.PKG_INFO, path)
                raise ValueError(msg, self)

            return version

2694 

    @property
    def _dep_map(self):
        """
        A map of extra to its list of (direct) requirements
        for this distribution, including the null extra.
        """
        try:
            return self.__dep_map
        except AttributeError:
            # Build once and cache on the instance.
            self.__dep_map = self._filter_extras(self._build_dep_map())
        return self.__dep_map

2706 

    @staticmethod
    def _filter_extras(dm):
        """
        Given a mapping of extras to dependencies, strip off
        environment markers and filter out any dependencies
        not matching the markers.
        """
        for extra in list(filter(None, dm)):
            new_extra = extra
            reqs = dm.pop(extra)
            # An extra key may carry a marker suffix: "extra_name:marker".
            new_extra, _, marker = extra.partition(':')
            fails_marker = marker and (
                invalid_marker(marker)
                or not evaluate_marker(marker)
            )
            if fails_marker:
                # Invalid or false marker: drop this group's requirements.
                reqs = []
            new_extra = safe_extra(new_extra) or None

            dm.setdefault(new_extra, []).extend(reqs)
        return dm

2728 

2729 def _build_dep_map(self): 

2730 dm = {} 

2731 for name in 'requires.txt', 'depends.txt': 

2732 for extra, reqs in split_sections(self._get_metadata(name)): 

2733 dm.setdefault(extra, []).extend(parse_requirements(reqs)) 

2734 return dm 

2735 

    def requires(self, extras=()):
        """List of Requirements needed for this distro if `extras` are used"""
        dm = self._dep_map
        deps = []
        # Unconditional requirements are stored under the None key.
        deps.extend(dm.get(None, ()))
        for ext in extras:
            try:
                deps.extend(dm[safe_extra(ext)])
            except KeyError:
                raise UnknownExtra(
                    "%s has no such extra feature %r" % (self, ext)
                )
        return deps

2749 

    def _get_metadata_path_for_display(self, name):
        """
        Return the path to the given metadata file, if available.
        """
        try:
            # We need to access _get_metadata_path() on the provider object
            # directly rather than through this class's __getattr__()
            # since _get_metadata_path() is marked private.
            path = self._provider._get_metadata_path(name)

        # Handle exceptions e.g. in case the distribution's metadata
        # provider doesn't support _get_metadata_path().
        except Exception:
            # Best-effort: this value is only used in error messages.
            return '[could not detect]'

        return path

2766 

2767 def _get_metadata(self, name): 

2768 if self.has_metadata(name): 

2769 for line in self.get_metadata_lines(name): 

2770 yield line 

2771 

    def _get_version(self):
        # Read the Version: header out of PKG-INFO, if present.
        lines = self._get_metadata(self.PKG_INFO)
        version = _version_from_file(lines)

        return version

2777 

    def activate(self, path=None, replace=False):
        """Ensure distribution is importable on `path` (default=sys.path)"""
        if path is None:
            path = sys.path
        self.insert_on(path, replace=replace)
        if path is sys.path:
            # Wire up any namespace packages this distribution declares.
            fixup_namespace_packages(self.location)
            for pkg in self._get_metadata('namespace_packages.txt'):
                if pkg in sys.modules:
                    declare_namespace(pkg)

2788 

    def egg_name(self):
        """Return what this distribution's standard .egg filename should be"""
        # Note: the returned name does not include the '.egg' extension.
        filename = "%s-%s-py%s" % (
            to_filename(self.project_name), to_filename(self.version),
            self.py_version or PY_MAJOR
        )

        if self.platform:
            filename += '-' + self.platform
        return filename

2799 

2800 def __repr__(self): 

2801 if self.location: 

2802 return "%s (%s)" % (self, self.location) 

2803 else: 

2804 return str(self) 

2805 

    def __str__(self):
        try:
            version = getattr(self, 'version', None)
        except ValueError:
            # The version property raises ValueError when metadata is missing.
            version = None
        version = version or "[unknown version]"
        return "%s %s" % (self.project_name, version)

2813 

    def __getattr__(self, attr):
        """Delegate all unrecognized public attributes to .metadata provider"""
        if attr.startswith('_'):
            # Never proxy private attributes (also prevents recursion when
            # self._provider itself is missing).
            raise AttributeError(attr)
        return getattr(self._provider, attr)

2819 

    def __dir__(self):
        # Merge this object's attribute names with the provider's public ones,
        # mirroring the delegation done in __getattr__.
        return list(
            set(super(Distribution, self).__dir__())
            | set(
                attr for attr in self._provider.__dir__()
                if not attr.startswith('_')
            )
        )

    if not hasattr(object, '__dir__'):
        # python 2.7 not supported
        del __dir__

2832 

    @classmethod
    def from_filename(cls, filename, metadata=None, **kw):
        # Normalize the path and dispatch on the basename via from_location.
        return cls.from_location(
            _normalize_cached(filename), os.path.basename(filename), metadata,
            **kw
        )

2839 

    def as_requirement(self):
        """Return a ``Requirement`` that matches this distribution exactly"""
        if isinstance(self.parsed_version, packaging.version.Version):
            spec = "%s==%s" % (self.project_name, self.parsed_version)
        else:
            # Legacy (non PEP 440) versions need the arbitrary-equality '==='.
            spec = "%s===%s" % (self.project_name, self.parsed_version)

        return Requirement.parse(spec)

2848 

    def load_entry_point(self, group, name):
        """Return the `name` entry point of `group` or raise ImportError"""
        ep = self.get_entry_info(group, name)
        if ep is None:
            raise ImportError("Entry point %r not found" % ((group, name),))
        return ep.load()

2855 

    def get_entry_map(self, group=None):
        """Return the entry point map for `group`, or the full entry map"""
        try:
            ep_map = self._ep_map
        except AttributeError:
            # Parse entry_points.txt once and cache the parsed map.
            ep_map = self._ep_map = EntryPoint.parse_map(
                self._get_metadata('entry_points.txt'), self
            )
        if group is not None:
            return ep_map.get(group, {})
        return ep_map

2867 

    def get_entry_info(self, group, name):
        """Return the EntryPoint object for `group`+`name`, or ``None``"""
        # Missing groups or names simply yield None.
        return self.get_entry_map(group).get(name)

2871 

    def insert_on(self, path, loc=None, replace=False):
        """Ensure self.location is on path

        If replace=False (default):
            - If location is already in path anywhere, do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent.
              - Else: add to the end of path.
        If replace=True:
            - If location is already on path anywhere (not eggs)
              or higher priority than its parent (eggs)
              do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent,
                removing any lower-priority entries.
              - Else: add it to the front of path.
        """

        loc = loc or self.location
        if not loc:
            return

        nloc = _normalize_cached(loc)
        bdir = os.path.dirname(nloc)
        # npath mirrors `path` with normalized entries, for comparison only.
        npath = [(p and _normalize_cached(p) or p) for p in path]

        for p, item in enumerate(npath):
            if item == nloc:
                if replace:
                    break
                else:
                    # don't modify path (even removing duplicates) if
                    # found and not replace
                    return
            elif item == bdir and self.precedence == EGG_DIST:
                # if it's an .egg, give it precedence over its directory
                # UNLESS it's already been added to sys.path and replace=False
                if (not replace) and nloc in npath[p:]:
                    return
                if path is sys.path:
                    self.check_version_conflict()
                path.insert(p, loc)
                npath.insert(p, nloc)
                break
        else:
            # Location was not found at all: append (or prepend if replacing).
            if path is sys.path:
                self.check_version_conflict()
            if replace:
                path.insert(0, loc)
            else:
                path.append(loc)
            return

        # p is the spot where we found or inserted loc; now remove duplicates
        while True:
            try:
                np = npath.index(nloc, p + 1)
            except ValueError:
                break
            else:
                del npath[np], path[np]
                # ha!
                p = np

        return

2939 

def check_version_conflict(self):
    """Warn when a top-level module of this distribution was already
    imported from a different location than this distribution provides.
    """
    if self.key == 'setuptools':
        # ignore the inevitable setuptools self-conflicts :(
        return

    namespaces = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
    normalized_loc = normalize_path(self.location)
    for mod_name in self._get_metadata('top_level.txt'):
        already_imported = mod_name in sys.modules
        is_namespace = (
            mod_name in namespaces or mod_name in _namespace_packages
        )
        if not already_imported or is_namespace:
            continue
        if mod_name in ('pkg_resources', 'setuptools', 'site'):
            # bootstrap modules; conflicts here are expected and harmless
            continue
        fn = getattr(sys.modules[mod_name], '__file__', None)
        if fn and (normalize_path(fn).startswith(normalized_loc)
                   or fn.startswith(self.location)):
            # the imported copy already comes from this distribution
            continue
        issue_warning(
            "Module %s was already imported from %s, but %s is being added"
            " to sys.path" % (mod_name, fn, self.location),
        )

2961 

def has_version(self):
    """Return True when a version is available, else warn and return False."""
    try:
        # the `version` property raises ValueError for unbuilt eggs
        self.version
    except ValueError:
        issue_warning("Unbuilt egg for " + repr(self))
        return False
    else:
        return True

2969 

def clone(self, **kw):
    """Copy this distribution, substituting in any changed keyword args"""
    copied_attrs = (
        'project_name', 'version', 'py_version',
        'platform', 'location', 'precedence',
    )
    for attr in copied_attrs:
        kw.setdefault(attr, getattr(self, attr, None))
    kw.setdefault('metadata', self._provider)
    return self.__class__(**kw)

2977 

@property
def extras(self):
    """Names of the extras declared in this distribution's dependency map."""
    # the None key holds the unconditional requirements; skip it
    return list(filter(None, self._dep_map))

2981 

2982 

class EggInfoDistribution(Distribution):
    def _reload_version(self):
        """
        Prefer the version recorded in the metadata file over the one
        parsed from the filename.

        Packages installed by distutils (e.g. numpy or scipy) used an old
        ``safe_version``, so their version numbers can get mangled when
        converted to filenames (e.g. 1.11.0.dev0+2329eae becomes
        1.11.0.dev0_2329eae) and would not parse properly downstream.
        Reading the metadata file sidesteps the mangling.
        """
        version_from_metadata = self._get_version()
        if version_from_metadata:
            self._version = version_from_metadata
        return self

3000 

3001 

class DistInfoDistribution(Distribution):
    """
    Wrap an actual or potential sys.path entry
    w/metadata, .dist-info style.
    """
    # .dist-info directories carry the core metadata in a METADATA file
    PKG_INFO = 'METADATA'
    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")

    @property
    def _parsed_pkg_info(self):
        """Parse and cache metadata"""
        if not hasattr(self, '_pkg_info'):
            raw = self.get_metadata(self.PKG_INFO)
            self._pkg_info = email.parser.Parser().parsestr(raw)
        return self._pkg_info

    @property
    def _dep_map(self):
        """Lazily computed map of extra name -> list of Requirements."""
        try:
            return self.__dep_map
        except AttributeError:
            self.__dep_map = self._compute_dependencies()
            return self.__dep_map

    def _compute_dependencies(self):
        """Recompute this distribution's dependencies."""
        dm = self.__dep_map = {None: []}

        # Collect every Requires-Dist entry, including any marker
        # (condition) expressions.
        reqs = []
        for req_line in self._parsed_pkg_info.get_all('Requires-Dist') or []:
            reqs.extend(parse_requirements(req_line))

        def reqs_for_extra(extra):
            # requirements active when `extra` is requested
            return [
                req for req in reqs
                if not req.marker or req.marker.evaluate({'extra': extra})
            ]

        common = frozenset(reqs_for_extra(None))
        dm[None].extend(common)

        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
            s_extra = safe_extra(extra.strip())
            dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common)

        return dm

3050 

3051 

# Map a metadata-directory suffix to the Distribution subclass that
# represents it; used when discovering distributions on sys.path.
_distributionImpl = {
    '.egg': Distribution,
    '.egg-info': EggInfoDistribution,
    '.dist-info': DistInfoDistribution,
}

3057 

3058 

def issue_warning(*args, **kw):
    """Emit a warning attributed to the first caller outside this module."""
    module_globals = globals()
    frame_depth = 1
    try:
        # find the first stack frame that is *not* code in
        # the pkg_resources module, to use for the warning
        while sys._getframe(frame_depth).f_globals is module_globals:
            frame_depth += 1
    except ValueError:
        # walked off the top of the stack; fall back to the last depth seen
        pass
    warnings.warn(stacklevel=frame_depth + 1, *args, **kw)

3070 

3071 

class RequirementParseError(ValueError):
    """Raised when a requirement specification string cannot be parsed."""

    def __str__(self):
        return ' '.join(self.args)

3075 

3076 

def parse_requirements(strs):
    """Yield ``Requirement`` objects for each specification in `strs`

    `strs` must be a string, or a (possibly-nested) iterable thereof.
    """
    # a steppable iterator lets a backslash continuation pull in the
    # following line
    line_iter = iter(yield_lines(strs))

    for spec in line_iter:
        # Drop comments -- a hash without a space may be in a URL.
        comment_pos = spec.find(' #')
        if comment_pos != -1:
            spec = spec[:comment_pos]
        # If there is a line continuation, drop it, and append the next line.
        if spec.endswith('\\'):
            spec = spec[:-2].strip()
            try:
                spec += next(line_iter)
            except StopIteration:
                return
        yield Requirement(spec)

3097 

3098 

class Requirement(packaging.requirements.Requirement):
    def __init__(self, requirement_string):
        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
        try:
            super(Requirement, self).__init__(requirement_string)
        except packaging.requirements.InvalidRequirement as e:
            raise RequirementParseError(str(e))
        self.unsafe_name = self.name
        project_name = safe_name(self.name)
        self.project_name = project_name
        # the lowercase key is how distributions are indexed
        self.key = project_name.lower()
        self.specs = [
            (spec.operator, spec.version) for spec in self.specifier
        ]
        self.extras = tuple(safe_extra(extra) for extra in self.extras)
        # everything that participates in equality/hashing
        self.hashCmp = (
            self.key,
            self.url,
            self.specifier,
            frozenset(self.extras),
            str(self.marker) if self.marker else None,
        )
        self.__hash = hash(self.hashCmp)

    def __eq__(self, other):
        if not isinstance(other, Requirement):
            return False
        return self.hashCmp == other.hashCmp

    def __ne__(self, other):
        return not self == other

    def __contains__(self, item):
        if isinstance(item, Distribution):
            if item.key != self.key:
                return False
            item = item.version

        # Allow prereleases always in order to match the previous behavior of
        # this method. In the future this should be smarter and follow PEP 440
        # more accurately.
        return self.specifier.contains(item, prereleases=True)

    def __hash__(self):
        return self.__hash

    def __repr__(self):
        return "Requirement.parse(%r)" % str(self)

    @staticmethod
    def parse(s):
        """Parse a single requirement string into a Requirement."""
        (req,) = parse_requirements(s)
        return req

3152 

3153 

3154def _always_object(classes): 

3155 """ 

3156 Ensure object appears in the mro even 

3157 for old-style classes. 

3158 """ 

3159 if object not in classes: 

3160 return classes + (object,) 

3161 return classes 

3162 

3163 

3164def _find_adapter(registry, ob): 

3165 """Return an adapter factory for `ob` from `registry`""" 

3166 types = _always_object(inspect.getmro(getattr(ob, '__class__', type(ob)))) 

3167 for t in types: 

3168 if t in registry: 

3169 return registry[t] 

3170 

3171 

def ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    parent = os.path.dirname(path)
    # py31compat shim provides makedirs(..., exist_ok=True) on Python 2
    py31compat.makedirs(parent, exist_ok=True)

3176 

3177 

def _bypass_ensure_directory(path):
    """Sandbox-bypassing version of ensure_directory()"""
    if not WRITE_SUPPORT:
        raise IOError('"os.mkdir" not supported on this platform.')
    dirname, filename = split(path)
    # nothing to do for degenerate paths or when the parent already exists
    if not (dirname and filename) or isdir(dirname):
        return
    # recurse first so ancestors exist before this level is created
    _bypass_ensure_directory(dirname)
    try:
        mkdir(dirname, 0o755)
    except FileExistsError:
        # another thread/process created it between the check and mkdir
        pass

3189 

3190 

def split_sections(s):
    """Split a string or iterable thereof into (section, content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines. If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    section = None
    content = []
    for line in yield_lines(s):
        if not line.startswith("["):
            content.append(line)
            continue
        if not line.endswith("]"):
            raise ValueError("Invalid section heading", line)
        # flush the previous segment before starting a new section
        if section or content:
            yield section, content
        section = line[1:-1].strip()
        content = []

    # wrap up last segment
    yield section, content

3215 

3216 

def _mkstemp(*args, **kw):
    """Create a temp file via tempfile.mkstemp with sandboxing bypassed."""
    saved_open = os.open
    try:
        # temporarily route file creation through the un-sandboxed os_open
        os.open = os_open
        return tempfile.mkstemp(*args, **kw)
    finally:
        # restore the (possibly sandboxed) original
        os.open = saved_open

3226 

3227 

# Silence the PEP440Warning by default, so that end users don't get hit by it
# randomly just because they use pkg_resources. We want to append the rule
# because we want earlier uses of filterwarnings to take precedence over this
# one.
warnings.filterwarnings("ignore", category=PEP440Warning, append=True)

3233 

3234 

3235# from jaraco.functools 1.3 

3236def _call_aside(f, *args, **kwargs): 

3237 f(*args, **kwargs) 

3238 return f 

3239 

3240 

@_call_aside
def _initialize(g=globals()):
    "Set up global resource manager (deliberately not state-saved)"
    # Runs once at import time via _call_aside.
    manager = ResourceManager()
    g['_manager'] = manager
    # Re-export every public ResourceManager method (resource_string,
    # resource_filename, ...) as a module-level function.
    g.update(
        (name, getattr(manager, name))
        for name in dir(manager)
        if not name.startswith('_')
    )

3251 

3252 

@_call_aside
def _initialize_master_working_set():
    """
    Prepare the master working set and make the ``require()``
    API available.

    This function has explicit effects on the global state
    of pkg_resources. It is intended to be invoked once at
    the initialization of this module.

    Invocation by other packages is unsupported and done
    at their own risk.
    """
    working_set = WorkingSet._build_master()
    _declare_state('object', working_set=working_set)

    # Bind the working set's bound methods to locals so the
    # globals().update(locals()) below publishes them as module-level API.
    require = working_set.require
    iter_entry_points = working_set.iter_entry_points
    add_activation_listener = working_set.subscribe
    run_script = working_set.run_script
    # backward compatibility
    run_main = run_script
    # Activate all distributions already on sys.path with replace=False and
    # ensure that all distributions added to the working set in the future
    # (e.g. by calling ``require()``) will get activated as well,
    # with higher priority (replace=True).
    tuple(
        dist.activate(replace=False)
        for dist in working_set
    )
    add_activation_listener(
        lambda dist: dist.activate(replace=True),
        existing=False,
    )
    working_set.entries = []
    # match order
    list(map(working_set.add_entry, sys.path))
    # Publish the locals defined above (require, run_script, ...) as
    # module-level names.
    globals().update(locals())

3291 

3292 

class PkgResourcesDeprecationWarning(Warning):
    """
    Base class for warnings about deprecated ``pkg_resources`` features.

    Deliberately derived from ``Warning`` rather than
    ``DeprecationWarning``, so that it is visible to end users by default.
    """