Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/pkg_resources/__init__.py: 1%

1578 statements  

« prev     ^ index     » next       coverage.py v7.3.2, created at 2023-12-08 06:51 +0000

1# coding: utf-8 

2""" 

3Package resource API 

4-------------------- 

5 

6A resource is a logical file contained within a package, or a logical 

7subdirectory thereof. The package resource API expects resource names 

8to have their path parts separated with ``/``, *not* whatever the local 

9path separator is. Do not use os.path operations to manipulate resource 

10names being passed into the API. 

11 

12The package resource API is designed to work with normal filesystem packages, 

13.egg files, and unpacked .egg files. It can also work in a limited way with 

14.zip files and with custom PEP 302 loaders that support the ``get_data()`` 

15method. 

16""" 

17 

18from __future__ import absolute_import 

19 

20import sys 

21import os 

22import io 

23import time 

24import re 

25import types 

26import zipfile 

27import zipimport 

28import warnings 

29import stat 

30import functools 

31import pkgutil 

32import operator 

33import platform 

34import collections 

35import plistlib 

36import email.parser 

37import errno 

38import tempfile 

39import textwrap 

40import itertools 

41import inspect 

42import ntpath 

43import posixpath 

44from pkgutil import get_importer 

45 

46try: 

47 import _imp 

48except ImportError: 

49 # Python 3.2 compatibility 

50 import imp as _imp 

51 

52try: 

53 FileExistsError 

54except NameError: 

55 FileExistsError = OSError 

56 

57from pkg_resources.extern import six 

58from pkg_resources.extern.six.moves import urllib, map, filter 

59 

60# capture these to bypass sandboxing 

61from os import utime 

62try: 

63 from os import mkdir, rename, unlink 

64 WRITE_SUPPORT = True 

65except ImportError: 

66 # no write support, probably under GAE 

67 WRITE_SUPPORT = False 

68 

69from os import open as os_open 

70from os.path import isdir, split 

71 

72try: 

73 import importlib.machinery as importlib_machinery 

74 # access attribute to force import under delayed import mechanisms. 

75 importlib_machinery.__name__ 

76except ImportError: 

77 importlib_machinery = None 

78 

79from . import py31compat 

80from pkg_resources.extern import appdirs 

81from pkg_resources.extern import packaging 

82__import__('pkg_resources.extern.packaging.version') 

83__import__('pkg_resources.extern.packaging.specifiers') 

84__import__('pkg_resources.extern.packaging.requirements') 

85__import__('pkg_resources.extern.packaging.markers') 

86 

87 

__metaclass__ = type


if (3, 0) < sys.version_info < (3, 4):
    raise RuntimeError("Python 3.4 or later is required")

if six.PY2:
    # These builtin exception names exist only on Python 3; bind them to
    # None so later references to the names don't raise NameError on Py2.
    PermissionError = None
    NotADirectoryError = None

# Forward declarations: these globals are (re)bound later in the module.
# Declaring them up front keeps linters satisfied.
require = working_set = add_activation_listener = None
resources_stream = cleanup_resources = resource_dir = None
resource_stream = set_extraction_path = resource_isdir = None
resource_string = iter_entry_points = resource_listdir = None
resource_filename = resource_exists = None
_distribution_finders = _namespace_handlers = _namespace_packages = None

118 

119 

class PEP440Warning(RuntimeWarning):
    """
    Issued when a version or specifier string does not comply with PEP 440.
    """

125 

126 

def parse_version(v):
    """Parse `v` into a PEP 440 version object.

    Falls back to a legacy (non-PEP-440) version object when `v` does not
    parse as a standard version.
    """
    version_module = packaging.version
    try:
        return version_module.Version(v)
    except version_module.InvalidVersion:
        return version_module.LegacyVersion(v)

132 

133 

# name -> state-variable type ('dict', 'object', 'none', ...); drives the
# _sget_*/_sset_* dispatch in __getstate__/__setstate__ below.
_state_vars = {}


def _declare_state(vartype, **kw):
    """Bind each keyword as a module global and record its state type."""
    module_globals = globals()
    for name, value in kw.items():
        module_globals[name] = value
        _state_vars[name] = vartype

140 

141 

def __getstate__():
    """Snapshot every registered module state variable into a plain dict."""
    g = globals()
    return {
        name: g['_sget_' + vartype](g[name])
        for name, vartype in _state_vars.items()
    }

148 

149 

def __setstate__(state):
    """Restore module state previously captured by ``__getstate__()``."""
    module_globals = globals()
    for name, value in state.items():
        setter = module_globals['_sset_' + _state_vars[name]]
        setter(name, module_globals[name], value)
    return state

155 

156 

def _sget_dict(val):
    """Snapshot a dict state variable by shallow-copying it."""
    return val.copy()


def _sset_dict(key, ob, state):
    """Restore a dict state variable in place from `state`."""
    ob.clear()
    ob.update(state)


def _sget_object(val):
    """Snapshot an object that implements the pickle state protocol."""
    return val.__getstate__()


def _sset_object(key, ob, state):
    """Restore an object's state via its own ``__setstate__`` hook."""
    ob.__setstate__(state)


def _sget_none(*args):
    """No-op snapshot for state variables declared with type 'none'."""
    return None


# restoring a 'none' variable is equally a no-op; share one implementation
_sset_none = _sget_none

175 

176 

def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of Mac OS X that would be required to *use* extensions produced by
    distutils. But what we want when checking compatibility is to know the
    version of Mac OS X that we are *running*. To allow usage of packages that
    explicitly require a newer version of Mac OS X, we must also know the
    current version of the OS.

    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    """
    plat = get_build_platform()
    match = macosVersionString.match(plat)
    if match is None or sys.platform != "darwin":
        return plat
    try:
        running_vers = '.'.join(_macosx_vers()[:2])
        plat = 'macosx-%s-%s' % (running_vers, match.group(3))
    except ValueError:
        # not Mac OS X: keep the build-platform string unchanged
        pass
    return plat

199 

200 

# Public API of the module.  Grouped by theme; keep additions in the
# matching group.
__all__ = [
    # Resource access and distribution/entry-point discovery
    'require', 'run_script', 'get_provider', 'get_distribution',
    'load_entry_point', 'get_entry_map', 'get_entry_info',
    'iter_entry_points', 'resource_string', 'resource_stream',
    'resource_filename', 'resource_listdir', 'resource_exists',
    'resource_isdir',

    # Environmental control
    'declare_namespace', 'working_set', 'add_activation_listener',
    'find_distributions', 'set_extraction_path', 'cleanup_resources',
    'get_default_cache',

    # Primary implementation classes
    'Environment', 'WorkingSet', 'ResourceManager', 'Distribution',
    'Requirement', 'EntryPoint',

    # Exceptions
    'ResolutionError', 'VersionConflict', 'DistributionNotFound',
    'UnknownExtra', 'ExtractionError',

    # Warnings
    'PEP440Warning',

    # Parsing functions and string utilities
    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
    'get_platform', 'compatible_platforms', 'yield_lines',
    'split_sections', 'safe_extra', 'to_filename', 'invalid_marker',
    'evaluate_marker',

    # Filesystem utilities
    'ensure_directory', 'normalize_path',

    # Distribution "precedence" constants
    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST',
    'DEVELOP_DIST',

    # "Provider" interfaces, implementations, and registration/lookup APIs
    'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
    'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
    'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
    'register_finder', 'register_namespace_handler',
    'register_loader_type', 'fixup_namespace_packages', 'get_importer',

    # Warnings
    'PkgResourcesDeprecationWarning',

    # Deprecated/backward compatibility only
    'run_main', 'AvailableDistributions',
]

249 

250 

class ResolutionError(Exception):
    """Abstract base for dependency resolution errors"""

    def __repr__(self):
        # e.g. VersionConflict(dist, req) -> "VersionConflict(dist, req)"
        return type(self).__name__ + repr(self.args)

256 

257 

258class VersionConflict(ResolutionError): 

259 """ 

260 An already-installed version conflicts with the requested version. 

261 

262 Should be initialized with the installed Distribution and the requested 

263 Requirement. 

264 """ 

265 

266 _template = "{self.dist} is installed but {self.req} is required" 

267 

268 @property 

269 def dist(self): 

270 return self.args[0] 

271 

272 @property 

273 def req(self): 

274 return self.args[1] 

275 

276 def report(self): 

277 return self._template.format(**locals()) 

278 

279 def with_context(self, required_by): 

280 """ 

281 If required_by is non-empty, return a version of self that is a 

282 ContextualVersionConflict. 

283 """ 

284 if not required_by: 

285 return self 

286 args = self.args + (required_by,) 

287 return ContextualVersionConflict(*args) 

288 

289 

class ContextualVersionConflict(VersionConflict):
    """
    A VersionConflict carrying a third argument: the set of requirements
    that demanded the installed Distribution.
    """

    _template = VersionConflict._template + ' by {self.required_by}'

    @property
    def required_by(self):
        # the demanding requirement set (third positional arg)
        return self.args[2]

301 

302 

class DistributionNotFound(ResolutionError):
    """A requested distribution was not found"""

    _template = ("The '{self.req}' distribution was not found "
                 "and is required by {self.requirers_str}")

    @property
    def req(self):
        # the unsatisfied Requirement (first positional arg)
        return self.args[0]

    @property
    def requirers(self):
        # who required it, or None/empty when requested directly
        return self.args[1]

    @property
    def requirers_str(self):
        return ', '.join(self.requirers) if self.requirers else 'the application'

    def report(self):
        """Render a human-readable description of the missing distribution."""
        return self._template.format(**locals())

    def __str__(self):
        return self.report()

328 

329 

class UnknownExtra(ResolutionError):
    """Distribution doesn't have an "extra feature" of the given name"""

332 

333 

# loader type/class -> factory producing an IResourceProvider for a module
_provider_factories = {}

# NOTE: derive the "major.minor" string from sys.version_info rather than
# slicing sys.version -- slicing breaks on two-digit minor versions
# (Python 3.10 would yield '3.1').
PY_MAJOR = '{}.{}'.format(*sys.version_info)

# Distribution "precedence" constants: when several distributions for the
# same project are available, higher values are preferred.
EGG_DIST = 3
BINARY_DIST = 2
SOURCE_DIST = 1
CHECKOUT_DIST = 0
DEVELOP_DIST = -1


def register_loader_type(loader_type, provider_factory):
    """Register `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    """
    _provider_factories[loader_type] = provider_factory

352 

353 

def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq, Requirement):
        # a Requirement resolves through the active working set
        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    if moduleOrReq not in sys.modules:
        __import__(moduleOrReq)
    module = sys.modules[moduleOrReq]
    loader = getattr(module, '__loader__', None)
    factory = _find_adapter(_provider_factories, loader)
    return factory(module)

365 

366 

def _macosx_vers(_cache=[]):
    """Return the running macOS version as a list of string components.

    The mutable default argument is a deliberate per-process cache: the
    version is computed once and reused on subsequent calls.
    """
    if _cache:
        return _cache[0]
    version = platform.mac_ver()[0]
    if not version:
        # fallback for MacPorts builds, where mac_ver() can be empty:
        # read the system plist directly
        plist = '/System/Library/CoreServices/SystemVersion.plist'
        if os.path.exists(plist) and hasattr(plistlib, 'readPlist'):
            plist_content = plistlib.readPlist(plist)
            if 'ProductVersion' in plist_content:
                version = plist_content['ProductVersion']
    _cache.append(version.split('.'))
    return _cache[0]

381 

382 

def _macosx_arch(machine):
    """Map a uname machine name to the arch label used in mac platform tags."""
    # both historical PowerPC spellings collapse to 'ppc'
    translations = {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}
    return translations.get(machine, machine)

385 

386 

def get_build_platform():
    """Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and Mac OS X.
    """
    from sysconfig import get_platform

    plat = get_platform()
    if sys.platform != "darwin" or plat.startswith('macosx-'):
        return plat
    try:
        version = _macosx_vers()
        machine = _macosx_arch(os.uname()[4].replace(" ", "_"))
        plat = "macosx-%d.%d-%s" % (int(version[0]), int(version[1]), machine)
    except ValueError:
        # a non-Mac darwin system: keep the sysconfig value
        pass
    return plat


macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
# alias kept for backward compatibility
get_platform = get_build_platform

415 

416 

def compatible_platforms(provided, required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    if provided is None or required is None or provided == required:
        # easy case
        return True

    reqMac = macosVersionString.match(required)
    if not reqMac:
        # XXX Linux and other platforms' special cases should go here
        return False

    # Mac OS X special cases
    provMac = macosVersionString.match(provided)
    if not provMac:
        # backwards compatibility for packages built before setuptools 0.6,
        # which used the old "darwin" designation instead of "macosx"
        provDarwin = darwinVersionString.match(provided)
        if provDarwin:
            dversion = int(provDarwin.group(1))
            macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
            if dversion == 7 and macosversion >= "10.3" or \
                    dversion == 8 and macosversion >= "10.4":
                return True
        # egg isn't macosx or legacy darwin
        return False

    # same major version and machine type required
    if provMac.group(1) != reqMac.group(1) or \
            provMac.group(3) != reqMac.group(3):
        return False

    # the required OS major update must be >= the provided one
    return int(provMac.group(2)) <= int(reqMac.group(2))

461 

462 

def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    # run the script inside the *caller's* namespace, wiped clean except
    # for its module name
    caller_globals = sys._getframe(1).f_globals
    saved_name = caller_globals['__name__']
    caller_globals.clear()
    caller_globals['__name__'] = saved_name
    require(dist_spec)[0].run_script(script_name, caller_globals)


# backward compatibility
run_main = run_script

474 

475 

def get_distribution(dist):
    """Return a current distribution object for a Requirement or string"""
    resolved = dist
    if isinstance(resolved, six.string_types):
        resolved = Requirement.parse(resolved)
    if isinstance(resolved, Requirement):
        resolved = get_provider(resolved)
    if not isinstance(resolved, Distribution):
        raise TypeError("Expected string, Requirement, or Distribution", resolved)
    return resolved

485 

486 

def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    distribution = get_distribution(dist)
    return distribution.load_entry_point(group, name)

490 

491 

def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    distribution = get_distribution(dist)
    return distribution.get_entry_map(group)

495 

496 

def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    distribution = get_distribution(dist)
    return distribution.get_entry_info(group, name)

500 

501 

# Abstract interface specification: methods intentionally have no ``self``
# and no body -- this class documents the protocol and is never instantiated.
class IMetadataProvider:
    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""

    def get_metadata(name):
        """The named metadata resource as a string"""

    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""

    def metadata_isdir(name):
        """Is the named metadata a directory? (like ``os.path.isdir()``)"""

    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""

523 

524 

# Abstract interface specification (see IMetadataProvider above for the
# convention): bodies are empty and methods take no ``self``.
class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources"""

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Does the package contain the named resource?"""

    def resource_isdir(resource_name):
        """Is the named resource a directory? (like ``os.path.isdir()``)"""

    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""

552 

class WorkingSet:
    """A collection of active distributions on sys.path (or a similar list)"""

    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        # path entries in insertion order; may contain duplicates, mirroring
        # sys.path semantics (see add_entry)
        self.entries = []
        # path entry -> list of distribution keys found on that entry
        self.entry_keys = {}
        # distribution key -> the active Distribution for that project
        self.by_key = {}
        # callables invoked (via _added_new) whenever a dist is activated
        self.callbacks = []

        if entries is None:
            entries = sys.path

        for entry in entries:
            self.add_entry(entry)

    @classmethod
    def _build_master(cls):
        """
        Prepare the master working set.
        """
        ws = cls()
        try:
            from __main__ import __requires__
        except ImportError:
            # The main program does not list any requirements
            return ws

        # ensure the requirements are met
        try:
            ws.require(__requires__)
        except VersionConflict:
            # a distribution found on the default path conflicts with
            # __requires__; rebuild purely from the requirements instead
            return cls._build_from_requirements(__requires__)

        return ws

    @classmethod
    def _build_from_requirements(cls, req_spec):
        """
        Build a working set from a requirement spec. Rewrites sys.path.
        """
        # try it without defaults already on sys.path
        # by starting with an empty path
        ws = cls([])
        reqs = parse_requirements(req_spec)
        dists = ws.resolve(reqs, Environment())
        for dist in dists:
            ws.add(dist)

        # add any missing entries from sys.path
        for entry in sys.path:
            if entry not in ws.entries:
                ws.add_entry(entry)

        # then copy back to sys.path
        sys.path[:] = ws.entries
        return ws

    def add_entry(self, entry):
        """Add a path item to ``.entries``, finding any distributions on it

        ``find_distributions(entry, True)`` is used to find distributions
        corresponding to the path entry, and they are added. `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        equal ``sys.path``.)
        """
        self.entry_keys.setdefault(entry, [])
        self.entries.append(entry)
        for dist in find_distributions(entry, True):
            self.add(dist, entry, False)

    def __contains__(self, dist):
        """True if `dist` is the active distribution for its project"""
        return self.by_key.get(dist.key) == dist

    def find(self, req):
        """Find a distribution matching requirement `req`

        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`. But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        """
        dist = self.by_key.get(req.key)
        if dist is not None and dist not in req:
            # XXX add more info
            raise VersionConflict(dist, req)
        return dist

    def iter_entry_points(self, group, name=None):
        """Yield entry point objects from `group` matching `name`

        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        """
        return (
            entry
            for dist in self
            for entry in dist.get_entry_map(group).values()
            if name is None or name == entry.name
        )

    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        # wipe the caller's namespace (keeping only __name__) so the script
        # executes as if it were the main program
        ns = sys._getframe(1).f_globals
        name = ns['__name__']
        ns.clear()
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)

    def __iter__(self):
        """Yield distributions for non-duplicate projects in the working set

        The yield order is the order in which the items' path entries were
        added to the working set.
        """
        seen = {}
        for item in self.entries:
            if item not in self.entry_keys:
                # workaround a cache issue
                continue

            for key in self.entry_keys[item]:
                if key not in seen:
                    seen[key] = 1
                    yield self.by_key[key]

    def add(self, dist, entry=None, insert=True, replace=False):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set, unless `replace=True`.
        If it's added, any callbacks registered with the ``subscribe()`` method
        will be called.
        """
        if insert:
            dist.insert_on(self.entries, entry, replace=replace)

        if entry is None:
            entry = dist.location
        # record the key both under the requested entry and the dist's own
        # location (they may differ)
        keys = self.entry_keys.setdefault(entry, [])
        keys2 = self.entry_keys.setdefault(dist.location, [])
        if not replace and dist.key in self.by_key:
            # ignore hidden distros
            return

        self.by_key[dist.key] = dist
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        # notify subscribers of the newly activated distribution
        self._added_new(dist)

    def resolve(self, requirements, env=None, installer=None,
                replace_conflicting=False, extras=None):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects. `env`,
        if supplied, should be an ``Environment`` instance. If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set. `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.

        Unless `replace_conflicting=True`, raises a VersionConflict exception
        if any requirements are found on the path that have the correct name
        but the wrong version. Otherwise, if an `installer` is supplied it
        will be invoked to obtain the correct version of the requirement and
        activate it.

        `extras` is a list of the extras to be used with these requirements.
        This is important because extra requirements may look like `my_req;
        extra = "my_extra"`, which would otherwise be interpreted as a purely
        optional requirement. Instead, we want to be able to assert that these
        requirements are truly required.
        """

        # set up the stack
        requirements = list(requirements)[::-1]
        # set of processed requirements
        processed = {}
        # key -> dist
        best = {}
        to_activate = []

        req_extras = _ReqExtras()

        # Mapping of requirement to set of distributions that required it;
        # useful for reporting info about conflicts.
        required_by = collections.defaultdict(set)

        while requirements:
            # process dependencies breadth-first
            req = requirements.pop(0)
            if req in processed:
                # Ignore cyclic or redundant dependencies
                continue

            if not req_extras.markers_pass(req, extras):
                # requirement guarded by an extra marker that isn't active
                continue

            dist = best.get(req.key)
            if dist is None:
                # Find the best distribution and add it to the map
                dist = self.by_key.get(req.key)
                if dist is None or (dist not in req and replace_conflicting):
                    ws = self
                    if env is None:
                        if dist is None:
                            env = Environment(self.entries)
                        else:
                            # Use an empty environment and workingset to avoid
                            # any further conflicts with the conflicting
                            # distribution
                            env = Environment([])
                            ws = WorkingSet([])
                    dist = best[req.key] = env.best_match(
                        req, ws, installer,
                        replace_conflicting=replace_conflicting
                    )
                    if dist is None:
                        requirers = required_by.get(req, None)
                        raise DistributionNotFound(req, requirers)
                to_activate.append(dist)
            if dist not in req:
                # Oops, the "best" so far conflicts with a dependency
                dependent_req = required_by[req]
                raise VersionConflict(dist, req).with_context(dependent_req)

            # push the new requirements onto the stack
            new_requirements = dist.requires(req.extras)[::-1]
            requirements.extend(new_requirements)

            # Register the new requirements needed by req
            for new_requirement in new_requirements:
                required_by[new_requirement].add(req.project_name)
                req_extras[new_requirement] = req.extras

            processed[req] = True

        # return list of distros to activate
        return to_activate

    def find_plugins(
            self, plugin_env, full_env=None, installer=None, fallback=True):
        """Find all activatable distributions in `plugin_env`

        Example usage::

            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            # add plugins+libs to sys.path
            map(working_set.add, distributions)
            # display errors
            print('Could not load', errors)

        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories. The `full_env`, if supplied, should be an ``Environment``
        contains all currently-available distributions. If `full_env` is not
        supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.

        `installer` is a standard installer callback as used by the
        ``resolve()`` method. The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.

        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies. `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred. Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        """

        plugin_projects = list(plugin_env)
        # scan project names in alphabetic order
        plugin_projects.sort()

        error_info = {}
        distributions = {}

        if full_env is None:
            env = Environment(self.entries)
            env += plugin_env
        else:
            env = full_env + plugin_env

        shadow_set = self.__class__([])
        # put all our entries in shadow_set
        list(map(shadow_set.add, self))

        for project_name in plugin_projects:

            # newest-to-oldest: the first version that resolves wins
            for dist in plugin_env[project_name]:

                req = [dist.as_requirement()]

                try:
                    resolvees = shadow_set.resolve(req, env, installer)

                except ResolutionError as v:
                    # save error info
                    error_info[dist] = v
                    if fallback:
                        # try the next older version of project
                        continue
                    else:
                        # give up on this project, keep going
                        break

                else:
                    list(map(shadow_set.add, resolvees))
                    distributions.update(dict.fromkeys(resolvees))

                    # success, no need to try any more versions of this project
                    break

        distributions = list(distributions)
        distributions.sort()

        return distributions, error_info

    def require(self, *requirements):
        """Ensure that distributions matching `requirements` are activated

        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required. The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        """
        needed = self.resolve(parse_requirements(requirements))

        for dist in needed:
            self.add(dist)

        return needed

    def subscribe(self, callback, existing=True):
        """Invoke `callback` for all distributions

        If `existing=True` (default),
        call on all existing ones, as well.
        """
        if callback in self.callbacks:
            # already subscribed; don't call twice per activation
            return
        self.callbacks.append(callback)
        if not existing:
            return
        for dist in self:
            callback(dist)

    def _added_new(self, dist):
        # fan a newly activated distribution out to all subscribers
        for callback in self.callbacks:
            callback(dist)

    def __getstate__(self):
        # shallow copies so the pickled state is detached from live mutation
        return (
            self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
            self.callbacks[:]
        )

    def __setstate__(self, e_k_b_c):
        entries, keys, by_key, callbacks = e_k_b_c
        self.entries = entries[:]
        self.entry_keys = keys.copy()
        self.by_key = by_key.copy()
        self.callbacks = callbacks[:]

937 

938 

class _ReqExtras(dict):
    """
    Map each requirement to the extras that demanded it.
    """

    def markers_pass(self, req, extras=None):
        """
        Evaluate markers for req against each extra that
        demanded it.

        Return False if the req has a marker and fails
        evaluation. Otherwise, return True.
        """
        if not req.marker:
            # unmarked requirements always apply
            return True
        candidate_extras = self.get(req, ()) + (extras or (None,))
        return any(
            req.marker.evaluate({'extra': extra})
            for extra in candidate_extras
        )

957 

958 

class Environment:
    """Searchable snapshot of distributions on a search path"""

    def __init__(
            self, search_path=None, platform=get_supported_platform(),
            python=PY_MAJOR):
        """Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.

        `platform` is an optional string specifying the name of the platform
        that platform-specific distributions must be compatible with.  If
        unspecified, it defaults to the current platform.  `python` is an
        optional string naming the desired version of Python (e.g. ``'3.6'``);
        it defaults to the current version.

        You may explicitly set `platform` (and/or `python`) to ``None`` if you
        wish to map *all* distributions, not just those compatible with the
        running platform or Python version.
        """
        self._distmap = {}
        self.platform = platform
        self.python = python
        self.scan(search_path)

    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?

        The distribution must match the platform and python version
        requirements specified when this environment was created, or False
        is returned.
        """
        # A version mismatch only matters when both sides specify one.
        if self.python is not None and dist.py_version is not None:
            if dist.py_version != self.python:
                return False
        return compatible_platforms(dist.platform, self.platform)

    def remove(self, dist):
        """Remove `dist` from the environment"""
        bucket = self._distmap[dist.key]
        bucket.remove(dist)

    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.  Only distributions conforming to
        the platform/python version defined at initialization are added.
        """
        entries = sys.path if search_path is None else search_path
        for entry in entries:
            for dist in find_distributions(entry):
                self.add(dist)

    def __getitem__(self, project_name):
        """Return a newest-to-oldest list of distributions for `project_name`

        Uses case-insensitive `project_name` comparison, assuming all the
        project's distributions use their project's name converted to all
        lowercase as their key.

        """
        return self._distmap.get(project_name.lower(), [])

    def add(self, dist):
        """Add `dist` if we ``can_add()`` it and it has not already been added
        """
        if not (self.can_add(dist) and dist.has_version()):
            return
        bucket = self._distmap.setdefault(dist.key, [])
        if dist in bucket:
            return
        bucket.append(dist)
        # Keep newest-first ordering for __getitem__ / best_match.
        bucket.sort(key=operator.attrgetter('hashcmp'), reverse=True)

    def best_match(
            self, req, working_set, installer=None, replace_conflicting=False):
        """Find distribution best matching `req` and usable on `working_set`

        This calls the ``find(req)`` method of the `working_set` to see if a
        suitable distribution is already active. (This may raise
        ``VersionConflict`` if an unsuitable version of the project is already
        active in the specified `working_set`.) If a suitable distribution
        isn't active, this method returns the newest distribution in the
        environment that meets the ``Requirement`` in `req`. If no suitable
        distribution is found, and `installer` is supplied, then the result of
        calling the environment's ``obtain(req, installer)`` method will be
        returned.
        """
        try:
            dist = working_set.find(req)
        except VersionConflict:
            if not replace_conflicting:
                raise
            dist = None
        if dist is not None:
            return dist
        # Newest acceptable distribution in this environment wins.
        for candidate in self[req.key]:
            if candidate in req:
                return candidate
        # try to download/install
        return self.obtain(req, installer)

    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)

        Obtain a distro that matches requirement (e.g. via download).  In the
        base ``Environment`` class, this routine just returns
        ``installer(requirement)``, unless `installer` is None, in which case
        None is returned instead.  This method is a hook that allows subclasses
        to attempt other ways of obtaining a distribution before falling back
        to the `installer` argument."""
        if installer is None:
            return None
        return installer(requirement)

    def __iter__(self):
        """Yield the unique project names of the available distributions"""
        for key in self._distmap:
            if self[key]:
                yield key

    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        if isinstance(other, Environment):
            for project in other:
                for dist in other[project]:
                    self.add(dist)
        elif isinstance(other, Distribution):
            self.add(other)
        else:
            raise TypeError("Can't add %r to environment" % (other,))
        return self

    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        # Start from an unrestricted environment so nothing is filtered out.
        new = self.__class__([], platform=None, python=None)
        new += self
        new += other
        return new

1103 

1104 

# XXX backward compatibility: legacy alias for Environment, kept so old
# imports of ``AvailableDistributions`` continue to work.
AvailableDistributions = Environment

1107 

1108 

# Raised (never returned) by ResourceManager.extraction_error(); the three
# attributes below are assigned there before the exception is raised.
class ExtractionError(RuntimeError):
    """An error occurred extracting a resource

    The following attributes are available from instances of this exception:

    manager
        The resource manager that raised this exception

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    """

1123 

1124 

class ResourceManager:
    """Manage resource extraction and packages"""
    # Base directory for extractions; None means fall back to
    # get_default_cache() at extraction time.
    extraction_path = None

    def __init__(self):
        # Maps each path handed out by get_cache_path() -> 1; also used by
        # set_extraction_path() to refuse changes after extraction started.
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        return get_provider(package_or_requirement).resource_isdir(
            resource_name
        )

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        return get_provider(package_or_requirement).get_resource_filename(
            self, resource_name
        )

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        return get_provider(package_or_requirement).get_resource_stream(
            self, resource_name
        )

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        return get_provider(package_or_requirement).get_resource_string(
            self, resource_name
        )

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        return get_provider(package_or_requirement).resource_listdir(
            resource_name
        )

    def extraction_error(self):
        """Give an error message for problems extracting file(s)"""

        # Intended to be called from inside an exception handler; capture
        # the currently-handled exception.
        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()

        # NOTE: format(**locals()) below depends on the local names
        # `old_exc` and `cache_path`; do not rename them.
        tmpl = textwrap.dedent("""
            Can't extract file(s) to egg cache

            The following error occurred while trying to extract file(s)
            to the Python egg cache:

              {old_exc}

            The Python egg cache directory is currently set to:

              {cache_path}

            Perhaps your account does not have write access to this directory?
            You can change the cache directory by setting the PYTHON_EGG_CACHE
            environment variable to point to an accessible directory.
            """).lstrip()
        err = ExtractionError(tmpl.format(**locals()))
        err.manager = self
        err.cache_path = cache_path
        err.original_error = old_exc
        raise err

    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it does
        not already exist.  `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension.  `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.

        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        """
        extract_path = self.extraction_path or get_default_cache()
        target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
        try:
            _bypass_ensure_directory(target_path)
        except Exception:
            # Re-raised as a user-friendly ExtractionError.
            self.extraction_error()

        self._warn_unsafe_extraction_path(extract_path)

        self.cached_files[target_path] = 1
        return target_path

    @staticmethod
    def _warn_unsafe_extraction_path(path):
        """
        If the default extraction path is overridden and set to an insecure
        location, such as /tmp, it opens up an opportunity for an attacker to
        replace an extracted file with an unauthorized payload. Warn the user
        if a known insecure location is used.

        See Distribute #375 for more details.
        """
        if os.name == 'nt' and not path.startswith(os.environ['windir']):
            # On Windows, permissions are generally restrictive by default
            # and temp directories are not writable by other users, so
            # bypass the warning.
            return
        mode = os.stat(path).st_mode
        if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
            msg = (
                "%s is writable by group/others and vulnerable to attack "
                "when "
                "used with get_resource_filename. Consider a more secure "
                "location (set with .set_extraction_path or the "
                "PYTHON_EGG_CACHE environment variable)." % path
            )
            warnings.warn(msg, UserWarning)

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource.  They must NOT call it on resources
        that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """

        if os.name == 'posix':
            # Make the resource executable
            mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
            os.chmod(tempname, mode)

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``.  (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks.  See that routine's documentation for more
        details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``.  You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done.  There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        """
        if self.cached_files:
            raise ValueError(
                "Can't change extraction path, files already extracted"
            )

        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process.  This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # XXX
        # NOTE(review): unimplemented stub upstream — despite the docstring
        # it currently does nothing and returns None.

1303 

1304 

def get_default_cache():
    """
    Return the ``PYTHON_EGG_CACHE`` environment variable
    or a platform-relevant user cache dir for an app
    named "Python-Eggs".
    """
    env_cache = os.environ.get('PYTHON_EGG_CACHE')
    if env_cache:
        return env_cache
    # Fall back to the vendored appdirs platform-specific cache location.
    return appdirs.user_cache_dir(appname='Python-Eggs')

1315 

1316 

def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    non_alnum = re.compile('[^A-Za-z0-9.]+')
    return non_alnum.sub('-', name)

1323 

1324 

def safe_version(version):
    """
    Convert an arbitrary string to a standard version string
    """
    try:
        # Prefer the fully normalized PEP 440 form when the version parses.
        return str(packaging.version.Version(version))
    except packaging.version.InvalidVersion:
        pass
    # Fallback: spaces become dots, other invalid runs become dashes.
    return re.sub('[^A-Za-z0-9.]+', '-', version.replace(' ', '.'))

1335 

1336 

def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    """
    invalid_runs = re.compile('[^A-Za-z0-9.-]+')
    return invalid_runs.sub('_', extra).lower()

1344 

1345 

def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    return '_'.join(name.split('-'))

1352 

1353 

def invalid_marker(text):
    """
    Validate text as a PEP 508 environment marker; return an exception
    if invalid or False otherwise.
    """
    try:
        evaluate_marker(text)
    except SyntaxError as exc:
        # Strip location info so the exception is stable/picklable.
        exc.filename = None
        exc.lineno = None
        return exc
    else:
        return False

1366 

1367 

def evaluate_marker(text, extra=None):
    """
    Evaluate a PEP 508 environment marker.
    Return a boolean indicating the marker result in this environment.
    Raise SyntaxError if marker is invalid.

    This implementation uses the 'pyparsing' module.
    """
    try:
        return packaging.markers.Marker(text).evaluate()
    except packaging.markers.InvalidMarker as e:
        # Present parse failures uniformly as SyntaxError to callers.
        raise SyntaxError(e)

1381 

1382 

class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

    # Filled in by EggProvider subclasses; None means "not an egg".
    egg_name = None
    egg_info = None
    loader = None

    def __init__(self, module):
        # Capture the module's PEP 302 loader (if any) and its directory.
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))

    def get_resource_filename(self, manager, resource_name):
        # Resolve the '/'-separated resource name against module_path.
        return self._fn(self.module_path, resource_name)

    def get_resource_stream(self, manager, resource_name):
        # In-memory stream over the raw bytes of the resource.
        return io.BytesIO(self.get_resource_string(manager, resource_name))

    def get_resource_string(self, manager, resource_name):
        # Despite the name, this returns bytes (whatever _get() yields).
        return self._get(self._fn(self.module_path, resource_name))

    def has_resource(self, resource_name):
        return self._has(self._fn(self.module_path, resource_name))

    def _get_metadata_path(self, name):
        return self._fn(self.egg_info, name)

    def has_metadata(self, name):
        if not self.egg_info:
            # NOTE: returns the falsy egg_info value itself, not False.
            return self.egg_info

        path = self._get_metadata_path(name)
        return self._has(path)

    def get_metadata(self, name):
        if not self.egg_info:
            return ""
        path = self._get_metadata_path(name)
        value = self._get(path)
        if six.PY2:
            # Python 2 callers expect the raw bytes unchanged.
            return value
        try:
            return value.decode('utf-8')
        except UnicodeDecodeError as exc:
            # Include the path in the error message to simplify
            # troubleshooting, and without changing the exception type.
            exc.reason += ' in {} file at path: {}'.format(name, path)
            raise

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def resource_isdir(self, resource_name):
        return self._isdir(self._fn(self.module_path, resource_name))

    def metadata_isdir(self, name):
        return self.egg_info and self._isdir(self._fn(self.egg_info, name))

    def resource_listdir(self, resource_name):
        return self._listdir(self._fn(self.module_path, resource_name))

    def metadata_listdir(self, name):
        if self.egg_info:
            return self._listdir(self._fn(self.egg_info, name))
        return []

    def run_script(self, script_name, namespace):
        # Execute the named 'scripts/...' metadata entry in `namespace`.
        # NOTE: the format(**locals()) below relies on the local names
        # `script` and `self`; do not rename them.
        script = 'scripts/' + script_name
        if not self.has_metadata(script):
            raise ResolutionError(
                "Script {script!r} not found in metadata at {self.egg_info!r}"
                .format(**locals()),
            )
        # Normalize all line endings to '\n' before compiling.
        script_text = self.get_metadata(script).replace('\r\n', '\n')
        script_text = script_text.replace('\r', '\n')
        script_filename = self._fn(self.egg_info, script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            source = open(script_filename).read()
            code = compile(source, script_filename, 'exec')
            exec(code, namespace, namespace)
        else:
            # No real file on disk (e.g. zipped egg): seed linecache so
            # tracebacks can still display the script source.
            from linecache import cache
            cache[script_filename] = (
                len(script_text), 0, script_text.split('\n'), script_filename
            )
            script_code = compile(script_text, script_filename, 'exec')
            exec(script_code, namespace, namespace)

    # The _has/_isdir/_listdir/_get primitives must be supplied by a
    # registered provider subclass for the concrete loader type.
    def _has(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _isdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _listdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _fn(self, base, resource_name):
        # Reject '..' and absolute resource names before joining onto base.
        self._validate_resource_path(resource_name)
        if resource_name:
            return os.path.join(base, *resource_name.split('/'))
        return base

    @staticmethod
    def _validate_resource_path(path):
        """
        Validate the resource paths according to the docs.
        https://setuptools.readthedocs.io/en/latest/pkg_resources.html#basic-resource-access

        >>> warned = getfixture('recwarn')
        >>> warnings.simplefilter('always')
        >>> vrp = NullProvider._validate_resource_path
        >>> vrp('foo/bar.txt')
        >>> bool(warned)
        False
        >>> vrp('../foo/bar.txt')
        >>> bool(warned)
        True
        >>> warned.clear()
        >>> vrp('/foo/bar.txt')
        >>> bool(warned)
        True
        >>> vrp('foo/../../bar.txt')
        >>> bool(warned)
        True
        >>> warned.clear()
        >>> vrp('foo/f../bar.txt')
        >>> bool(warned)
        False

        Windows path separators are straight-up disallowed.
        >>> vrp(r'\\foo/bar.txt')
        Traceback (most recent call last):
        ...
        ValueError: Use of .. or absolute path in a resource path \
is not allowed.

        >>> vrp(r'C:\\foo/bar.txt')
        Traceback (most recent call last):
        ...
        ValueError: Use of .. or absolute path in a resource path \
is not allowed.

        Blank values are allowed

        >>> vrp('')
        >>> bool(warned)
        False

        Non-string values are not.

        >>> vrp(None)
        Traceback (most recent call last):
        ...
        AttributeError: ...
        """
        invalid = (
            os.path.pardir in path.split(posixpath.sep) or
            posixpath.isabs(path) or
            ntpath.isabs(path)
        )
        if not invalid:
            return

        msg = "Use of .. or absolute path in a resource path is not allowed."

        # Aggressively disallow Windows absolute paths
        if ntpath.isabs(path) and not posixpath.isabs(path):
            raise ValueError(msg)

        # for compatibility, warn; in future
        # raise ValueError(msg)
        warnings.warn(
            msg[:-1] + " and will raise exceptions in a future release.",
            DeprecationWarning,
            stacklevel=4,
        )

    def _get(self, path):
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
        )

1573 

1574 

# Catch-all registration: any loader type with no more specific provider
# falls back to NullProvider.
register_loader_type(object, NullProvider)

1577 

class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self, module):
        NullProvider.__init__(self, module)
        self._setup_prefix()

    def _setup_prefix(self):
        # Our metadata may be nested inside a "basket" of multiple eggs,
        # so walk upward from module_path instead of using .archive.
        path = self.module_path
        previous = None
        while path != previous:
            if _is_egg_path(path):
                self.egg_name = os.path.basename(path)
                self.egg_info = os.path.join(path, 'EGG-INFO')
                self.egg_root = path
                return
            previous = path
            path, _ = os.path.split(path)

1598 

1599 

class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self, path):
        return os.path.isdir(path)

    def _listdir(self, path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        # Real files get a genuine binary file object rather than BytesIO.
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _get(self, path):
        with open(path, 'rb') as stream:
            return stream.read()

    @classmethod
    def _register(cls):
        # Register for both source and bytecode-only filesystem loaders;
        # missing loader names resolve to type(None), a harmless no-op key.
        for name in ('SourceFileLoader', 'SourcelessFileLoader'):
            loader_cls = getattr(importlib_machinery, name, type(None))
            register_loader_type(loader_cls, cls)

1625 

1626 

# Hook DefaultProvider up to the standard filesystem import loaders.
DefaultProvider._register()

1629 

class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    module_path = None

    def _has(self, path):
        return False

    def _isdir(self, path):
        return False

    def _get(self, path):
        return ''

    def _listdir(self, path):
        return []

    def __init__(self):
        # Deliberately skip NullProvider.__init__: there is no module.
        pass

1645 

1646 

# Shared singleton used wherever a do-nothing provider is needed.
empty_provider = EmptyProvider()

1649 

class ZipManifests(dict):
    """
    zip manifest builder
    """

    @classmethod
    def build(cls, path):
        """
        Build a dictionary similar to the zipimport directory
        caches, except instead of tuples, store ZipInfo objects.

        Use a platform-specific path separator (os.sep) for the path keys
        for compatibility with pypy on Windows.
        """
        with zipfile.ZipFile(path) as zfile:
            return {
                name.replace('/', os.sep): zfile.getinfo(name)
                for name in zfile.namelist()
            }

    load = build

1675 

1676 

class MemoizedZipManifests(ZipManifests):
    """
    Memoized zipfile manifests.
    """
    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')

    def load(self, path):
        """
        Load a manifest at path or return a suitable manifest already loaded.
        """
        path = os.path.normpath(path)
        mtime = os.stat(path).st_mtime

        # Rebuild only when unseen or the archive changed on disk.
        cached = self.get(path)
        if cached is None or cached.mtime != mtime:
            cached = self.manifest_mod(self.build(path), mtime)
            self[path] = cached

        return cached.manifest

1695 

1696 

class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""

    eagers = None
    # Shared cache of archive manifests, keyed by archive path.
    _zip_manifests = MemoizedZipManifests()

    def __init__(self, module):
        EggProvider.__init__(self, module)
        # Archive path plus separator; prefix of every virtual path inside.
        self.zip_pre = self.loader.archive + os.sep

    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        fspath = fspath.rstrip(os.sep)
        if fspath == self.loader.archive:
            return ''
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre):]
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.zip_pre)
        )

    def _parts(self, zip_path):
        # Convert a zipfile subpath into an egg-relative path part list.
        # pseudo-fs path
        fspath = self.zip_pre + zip_path
        if fspath.startswith(self.egg_root + os.sep):
            return fspath[len(self.egg_root) + 1:].split(os.sep)
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.egg_root)
        )

    @property
    def zipinfo(self):
        # Manifest of ZipInfo objects for this archive (memoized by mtime).
        return self._zip_manifests.load(self.loader.archive)

    def get_resource_filename(self, manager, resource_name):
        if not self.egg_name:
            raise NotImplementedError(
                "resource_filename() only supported for .egg, not .zip"
            )
        # no need to lock for extraction, since we use temp names
        zip_path = self._resource_to_zip(resource_name)
        eagers = self._get_eager_resources()
        if '/'.join(self._parts(zip_path)) in eagers:
            # An eager resource requires all eagers extracted together.
            for name in eagers:
                self._extract_resource(manager, self._eager_to_zip(name))
        return self._extract_resource(manager, zip_path)

    @staticmethod
    def _get_date_and_size(zip_stat):
        size = zip_stat.file_size
        # ymdhms+wday, yday, dst
        date_time = zip_stat.date_time + (0, 0, -1)
        # 1980 offset already done
        timestamp = time.mktime(date_time)
        return timestamp, size

    def _extract_resource(self, manager, zip_path):
        # Directories: recursively extract contents, return the dir name.
        if zip_path in self._index():
            for name in self._index()[zip_path]:
                last = self._extract_resource(
                    manager, os.path.join(zip_path, name)
                )
            # return the extracted directory name
            return os.path.dirname(last)

        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])

        if not WRITE_SUPPORT:
            raise IOError('"os.rename" and "os.unlink" are not supported '
                          'on this platform')
        try:

            real_path = manager.get_cache_path(
                self.egg_name, self._parts(zip_path)
            )

            if self._is_current(real_path, zip_path):
                return real_path

            # Write to a temp name, stamp the archive mtime, then rename
            # into place so concurrent extractors never see partial files.
            outf, tmpnam = _mkstemp(
                ".$extract",
                dir=os.path.dirname(real_path),
            )
            os.write(outf, self.loader.get_data(zip_path))
            os.close(outf)
            utime(tmpnam, (timestamp, timestamp))
            manager.postprocess(tmpnam, real_path)

            try:
                rename(tmpnam, real_path)

            except os.error:
                if os.path.isfile(real_path):
                    if self._is_current(real_path, zip_path):
                        # the file became current since it was checked above,
                        # so proceed.
                        return real_path
                    # Windows, del old file and retry
                    elif os.name == 'nt':
                        unlink(real_path)
                        rename(tmpnam, real_path)
                        return real_path
                raise

        except os.error:
            # report a user-friendly error
            manager.extraction_error()

        return real_path

    def _is_current(self, file_path, zip_path):
        """
        Return True if the file_path is current for this zip_path
        """
        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
        if not os.path.isfile(file_path):
            return False
        stat = os.stat(file_path)
        if stat.st_size != size or stat.st_mtime != timestamp:
            return False
        # check that the contents match
        zip_contents = self.loader.get_data(zip_path)
        with open(file_path, 'rb') as f:
            file_contents = f.read()
        return zip_contents == file_contents

    def _get_eager_resources(self):
        # Lazily read eager-resource names from metadata; cached on self.
        if self.eagers is None:
            eagers = []
            for name in ('native_libs.txt', 'eager_resources.txt'):
                if self.has_metadata(name):
                    eagers.extend(self.get_metadata_lines(name))
            self.eagers = eagers
        return self.eagers

    def _index(self):
        # Lazily build and cache a parent-dir -> child-names index of the
        # archive, used for directory queries.
        try:
            return self._dirindex
        except AttributeError:
            ind = {}
            for path in self.zipinfo:
                parts = path.split(os.sep)
                while parts:
                    parent = os.sep.join(parts[:-1])
                    if parent in ind:
                        ind[parent].append(parts[-1])
                        break
                    else:
                        ind[parent] = [parts.pop()]
            self._dirindex = ind
            return ind

    def _has(self, fspath):
        zip_path = self._zipinfo_name(fspath)
        return zip_path in self.zipinfo or zip_path in self._index()

    def _isdir(self, fspath):
        return self._zipinfo_name(fspath) in self._index()

    def _listdir(self, fspath):
        return list(self._index().get(self._zipinfo_name(fspath), ()))

    def _eager_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.egg_root, resource_name))

    def _resource_to_zip(self, resource_name):
        return self._zipinfo_name(self._fn(self.module_path, resource_name))

1867 

1868 

# Modules imported from zip archives are served by ZipProvider.
register_loader_type(zipimport.zipimporter, ZipProvider)

1871 

class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    """

    def __init__(self, path):
        self.path = path

    def _get_metadata_path(self, name):
        return self.path

    def has_metadata(self, name):
        if name != 'PKG-INFO':
            return False
        return os.path.isfile(self.path)

    def get_metadata(self, name):
        if name != 'PKG-INFO':
            raise KeyError("No metadata except PKG-INFO is available")

        with io.open(self.path, encoding='utf-8', errors="replace") as f:
            metadata = f.read()
        self._warn_on_replacement(metadata)
        return metadata

    def _warn_on_replacement(self, metadata):
        # Python 2.7 compat for: replacement_char = '�'
        replacement_char = b'\xef\xbf\xbd'.decode('utf-8')
        if replacement_char not in metadata:
            return
        tmpl = "{self.path} could not be properly decoded in UTF-8"
        warnings.warn(tmpl.format(**locals()))

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

1912 

1913 

class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(basedir, project_name=dist_name, metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        # `module_path` anchors resource lookups; `egg_info` anchors
        # metadata lookups.
        self.module_path = path
        self.egg_info = egg_info

1937 

1938 

class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""

        self.zip_pre = importer.archive + os.sep
        self.loader = importer
        # a zipimporter for a path inside the archive carries a prefix
        prefix = importer.prefix
        if prefix:
            self.module_path = os.path.join(importer.archive, prefix)
        else:
            self.module_path = importer.archive
        self._setup_prefix()

1952 

1953 

# Registry mapping importer type -> distribution finder, preserved across
# pkg_resources reloads via _declare_state.
_declare_state('dict', _distribution_finders={})

1955 

1956 

def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path
    item handler).  `distribution_finder` is a callable that, given a path
    item and the importer instance, yields the ``Distribution`` instances
    found on that path item; ``pkg_resources.find_on_path`` is an example.
    """
    _distribution_finders[importer_type] = distribution_finder

1965 

1966 

def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    # dispatch on the importer type registered via register_finder()
    importer = get_importer(path_item)
    find = _find_adapter(_distribution_finders, importer)
    return find(importer, path_item, only)

1972 

1973 

def find_eggs_in_zip(importer, path_item, only=False):
    """
    Find eggs in zip files; possibly multiple nested eggs.
    """
    # wheels are not supported with this finder
    # they don't have PKG-INFO metadata, and won't ever contain eggs
    if importer.archive.endswith('.whl'):
        return
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        # don't yield nested distros
        return
    for subitem in metadata.resource_listdir(''):
        subpath = os.path.join(path_item, subitem)
        if _is_egg_path(subitem):
            # recurse into a nested egg
            sub_importer = zipimport.zipimporter(subpath)
            for dist in find_eggs_in_zip(sub_importer, subpath):
                yield dist
        elif subitem.lower().endswith('.dist-info'):
            submeta = EggMetadata(zipimport.zipimporter(subpath))
            submeta.egg_info = subpath
            yield Distribution.from_location(path_item, subitem, submeta)

1999 

2000 

# Zip archives (including eggs) are scanned by the recursive finder above.
register_finder(zipimport.zipimporter, find_eggs_in_zip)

2002 

2003 

def find_nothing(importer, path_item, only=False):
    """Finder for importer types that can never contain distributions."""
    return ()

2006 

2007 

# Fallback: unrecognized importer types yield no distributions.
register_finder(object, find_nothing)

2009 

2010 

def _by_version_descending(names):
    """
    Given a list of filenames, return them in descending order
    by version number.

    >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg'
    >>> _by_version_descending(names)
    ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg']
    """
    def _by_version(name):
        """
        Parse each component of the filename
        """
        stem, ext = os.path.splitext(name)
        pieces = itertools.chain(stem.split('-'), [ext])
        return [packaging.version.parse(piece) for piece in pieces]

    return sorted(names, key=_by_version, reverse=True)

2035 

2036 

def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    path_item = _normalize_cached(path_item)

    if _is_unpacked_egg(path_item):
        # the directory itself is a single unpacked egg
        yield Distribution.from_filename(
            path_item, metadata=PathMetadata(
                path_item, os.path.join(path_item, 'EGG-INFO')
            )
        )
        return

    entries = safe_listdir(path_item)

    # For performance, before sorting by version, screen entries for only
    # those that will yield distributions.  Remember each entry's factory
    # so it is not recomputed in the loop below (dist_factory is pure, so
    # the result is identical either way).
    factories = {
        entry: dist_factory(path_item, entry, only)
        for entry in entries
    }
    filtered = (entry for entry in entries if factories[entry])

    # scan for .egg and .egg-info in directory
    path_item_entries = _by_version_descending(filtered)
    for entry in path_item_entries:
        fullpath = os.path.join(path_item, entry)
        factory = factories[entry]
        for dist in factory(fullpath):
            yield dist

2067 

2068 

def dist_factory(path_item, entry, only):
    """
    Return a dist_factory for a path_item and entry
    """
    lower = entry.lower()
    # metadata directories/files always produce distributions
    if lower.endswith(('.egg-info', '.dist-info')):
        return distributions_from_metadata
    # eggs and egg-links are skipped entirely when `only` is requested
    if not only and _is_egg_path(entry):
        return find_distributions
    if not only and lower.endswith('.egg-link'):
        return resolve_egg_link
    # falsy sentinel: produces nothing and screens the entry out
    return NoDists()

2084 

2085 

class NoDists:
    """
    Falsy, callable sentinel factory that yields no distributions.

    >>> bool(NoDists())
    False

    >>> list(NoDists()('anything'))
    []
    """
    def __bool__(self):
        # falsy so dist_factory() callers can screen entries out cheaply
        return False
    if six.PY2:
        # Python 2 spells the truthiness hook __nonzero__
        __nonzero__ = __bool__

    def __call__(self, fullpath):
        # an empty iterator: no distributions at this path
        return iter(())

2101 

2102 

def safe_listdir(path):
    """
    Attempt to list contents of path, but suppress some exceptions.
    """
    try:
        return os.listdir(path)
    except (PermissionError, NotADirectoryError):
        pass
    except OSError as e:
        # Swallow only "does not exist", "not a directory" and "permission
        # denied"; anything else is a genuine error and propagates.
        # winerror 267 is how Python 2 on Windows reports "not a directory".
        harmless = (
            e.errno in (errno.ENOTDIR, errno.EACCES, errno.ENOENT)
            or getattr(e, "winerror", None) == 267
        )
        if not harmless:
            raise
    return ()

2122 

2123 

def distributions_from_metadata(path):
    """Yield a develop-mode Distribution for the metadata at `path`."""
    root = os.path.dirname(path)
    if os.path.isdir(path):
        if not os.listdir(path):
            # empty metadata dir; skip
            return
        metadata = PathMetadata(root, path)
    else:
        metadata = FileMetadata(path)
    entry = os.path.basename(path)
    yield Distribution.from_location(
        root, entry, metadata, precedence=DEVELOP_DIST,
    )

2137 

2138 

def non_empty_lines(path):
    """
    Yield non-empty lines from file at path
    """
    with open(path) as f:
        for raw in f:
            stripped = raw.strip()
            if stripped:
                yield stripped

2148 

2149 

def resolve_egg_link(path):
    """
    Given a path to an .egg-link, resolve distributions
    present in the referenced path.
    """
    base = os.path.dirname(path)
    # each non-empty line of the .egg-link is a path relative to the link
    resolved_paths = (
        os.path.join(base, ref)
        for ref in non_empty_lines(path)
    )
    dist_groups = map(find_distributions, resolved_paths)
    # only the first referenced path contributes distributions
    return next(dist_groups, ())

2162 

2163 

# Plain filesystem directories are scanned by find_on_path, whether reached
# through the legacy ImpImporter or importlib's FileFinder (when available).
register_finder(pkgutil.ImpImporter, find_on_path)

if hasattr(importlib_machinery, 'FileFinder'):
    register_finder(importlib_machinery.FileFinder, find_on_path)

2168 

# Namespace-package registries, preserved across pkg_resources reloads:
# handlers keyed by importer type, and parent -> child package names.
_declare_state('dict', _namespace_handlers={})
_declare_state('dict', _namespace_packages={})

2171 

2172 

def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path
    item handler), and `namespace_handler` is a callable like this::

        def namespace_handler(importer, path_entry, moduleName, module):
            # return a path_entry to use for child packages

    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item.  They should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath; see ``pkg_resources.file_ns_handler`` for an
    example handler.
    """
    _namespace_handlers[importer_type] = namespace_handler

2189 

2190 

def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)"""

    importer = get_importer(path_item)
    if importer is None:
        # nothing can be imported from this path item at all
        return None

    # capture warnings due to #1111
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        loader = importer.find_module(packageName)

    if loader is None:
        # the namespace package has no presence on this path item
        return None
    module = sys.modules.get(packageName)
    if module is None:
        # first sighting: create a stub module with an empty __path__ and
        # bind it as an attribute of its parent package
        module = sys.modules[packageName] = types.ModuleType(packageName)
        module.__path__ = []
        _set_parent_ns(packageName)
    elif not hasattr(module, '__path__'):
        raise TypeError("Not a package:", packageName)
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer, path_item, packageName, module)
    if subpath is not None:
        # this path item contributes a subdirectory: record it, execute the
        # package's __init__, then re-sort __path__ to match sys.path order
        path = module.__path__
        path.append(subpath)
        loader.load_module(packageName)
        _rebuild_mod_path(path, packageName, module)
    return subpath

2220 

2221 

def _rebuild_mod_path(orig_path, package_name, module):
    """
    Rebuild module.__path__ ensuring that all entries are ordered
    corresponding to their sys.path order
    """
    sys_path = [_normalize_cached(p) for p in sys.path]

    def safe_sys_path_index(entry):
        """
        Workaround for #520 and #513.
        """
        try:
            return sys_path.index(entry)
        except ValueError:
            # entries not found on sys.path sort last
            return float('inf')

    def position_in_sys_path(path):
        """
        Return the ordinal of the path based on its position in sys.path
        """
        path_parts = path.split(os.sep)
        # strip the trailing package-name components to recover the
        # sys.path entry this __path__ entry was derived from
        module_parts = package_name.count('.') + 1
        parts = path_parts[:-module_parts]
        return safe_sys_path_index(_normalize_cached(os.sep.join(parts)))

    new_path = sorted(orig_path, key=position_in_sys_path)
    new_path = [_normalize_cached(p) for p in new_path]

    if isinstance(module.__path__, list):
        # mutate in place so aliased references observe the new ordering
        module.__path__[:] = new_path
    else:
        module.__path__ = new_path

2254 

2255 

def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package"""

    # the import lock guards _namespace_packages and the module mutations
    # performed by _handle_ns()
    _imp.acquire_lock()
    try:
        if packageName in _namespace_packages:
            # already declared; nothing to do
            return

        path = sys.path
        parent, _, _ = packageName.rpartition('.')

        if parent:
            # recursively declare every ancestor first, and make sure the
            # parent is importable so its __path__ can be used below
            declare_namespace(parent)
            if parent not in _namespace_packages:
                __import__(parent)
            try:
                path = sys.modules[parent].__path__
            except AttributeError:
                raise TypeError("Not a package:", parent)

        # Track what packages are namespaces, so when new path items are added,
        # they can be updated
        _namespace_packages.setdefault(parent or None, []).append(packageName)
        _namespace_packages.setdefault(packageName, [])

        for path_item in path:
            # Ensure all the parent's path items are reflected in the child,
            # if they apply
            _handle_ns(packageName, path_item)

    finally:
        _imp.release_lock()

2288 

2289 

def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    # serialize against other namespace-package mutations
    _imp.acquire_lock()
    try:
        for package in _namespace_packages.get(parent, ()):
            subpath = _handle_ns(package, path_item)
            if subpath:
                # recurse so nested namespace packages also pick up the entry
                fixup_namespace_packages(subpath, package)
    finally:
        _imp.release_lock()

2300 

2301 

def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""

    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    # Only return the path if it's not already there
    already_present = any(
        _normalize_cached(item) == normalized
        for item in module.__path__
    )
    if already_present:
        return None
    return subpath

2313 

2314 

# Filesystem and zipfile importers share the same subpath computation, as
# does importlib's FileFinder when available.
register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
register_namespace_handler(zipimport.zipimporter, file_ns_handler)

if hasattr(importlib_machinery, 'FileFinder'):
    register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)

2320 

2321 

def null_ns_handler(importer, path_item, packageName, module):
    """Namespace handler that never contributes a subpath."""
    return None

2324 

2325 

# Fallback: unrecognized importer types contribute no namespace subpaths.
register_namespace_handler(object, null_ns_handler)

2327 

2328 

def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes"""
    patched = _cygwin_patch(filename)
    return os.path.normcase(os.path.realpath(os.path.normpath(patched)))

2332 

2333 

2334def _cygwin_patch(filename): # pragma: nocover 

2335 """ 

2336 Contrary to POSIX 2008, on Cygwin, getcwd (3) contains 

2337 symlink components. Using 

2338 os.path.abspath() works around this limitation. A fix in os.getcwd() 

2339 would probably better, in Cygwin even more so, except 

2340 that this seems to be by design... 

2341 """ 

2342 return os.path.abspath(filename) if sys.platform == 'cygwin' else filename 

2343 

2344 

def _normalize_cached(filename, _cache={}):
    # NOTE: the mutable default is intentional -- it is the module-lifetime
    # memoization cache for normalize_path().
    if filename not in _cache:
        _cache[filename] = normalize_path(filename)
    return _cache[filename]

2351 

2352 

2353def _is_egg_path(path): 

2354 """ 

2355 Determine if given path appears to be an egg. 

2356 """ 

2357 return path.lower().endswith('.egg') 

2358 

2359 

def _is_unpacked_egg(path):
    """
    Determine if given path appears to be an unpacked egg.
    """
    if not _is_egg_path(path):
        return False
    pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
    return os.path.isfile(pkg_info)

2368 

2369 

2370def _set_parent_ns(packageName): 

2371 parts = packageName.split('.') 

2372 name = parts.pop() 

2373 if parts: 

2374 parent = '.'.join(parts) 

2375 setattr(sys.modules[parent], name, sys.modules[packageName]) 

2376 

2377 

def yield_lines(strs):
    """Yield non-empty/non-comment lines of a string or sequence"""
    if isinstance(strs, six.string_types):
        for raw in strs.splitlines():
            line = raw.strip()
            # skip blank lines/comments
            if line and not line.startswith('#'):
                yield line
    else:
        # a (possibly nested) sequence of strings: recurse into each item
        for item in strs:
            for line in yield_lines(item):
                yield line

2390 

2391 

# Dotted module path, e.g. "pkg.sub.mod" (anchored at end of string).
MODULE = re.compile(r"\w+(\.\w+)*$").match
# Egg basename: name[-version[-pyX.Y[-platform]]]; only the project name
# is mandatory.
EGG_NAME = re.compile(
    r"""
    (?P<name>[^-]+) (
        -(?P<ver>[^-]+) (
            -py(?P<pyver>[^-]+) (
                -(?P<plat>.+)
            )?
        )?
    )?
    """,
    re.VERBOSE | re.IGNORECASE,
).match

2405 

2406 

class EntryPoint:
    """Object representing an advertised importable object"""

    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.name = name
        self.module_name = module_name
        # attrs: dotted attribute path within the module; extras: the
        # distribution extras that must be required before loading
        self.attrs = tuple(attrs)
        self.extras = tuple(extras)
        self.dist = dist

    def __str__(self):
        # inverse of parse(): 'name = module:attr.path [extra1,extra2]'
        s = "%s = %s" % (self.name, self.module_name)
        if self.attrs:
            s += ':' + '.'.join(self.attrs)
        if self.extras:
            s += ' [%s]' % ','.join(self.extras)
        return s

    def __repr__(self):
        return "EntryPoint.parse(%r)" % str(self)

    def load(self, require=True, *args, **kwargs):
        """
        Require packages for this EntryPoint, then resolve it.
        """
        # legacy calling conventions (require=False or extra args) still
        # work but are deprecated
        if not require or args or kwargs:
            warnings.warn(
                "Parameters to load are deprecated. Call .resolve and "
                ".require separately.",
                PkgResourcesDeprecationWarning,
                stacklevel=2,
            )
        if require:
            self.require(*args, **kwargs)
        return self.resolve()

    def resolve(self):
        """
        Resolve the entry point from its module and attrs.
        """
        module = __import__(self.module_name, fromlist=['__name__'], level=0)
        try:
            return functools.reduce(getattr, self.attrs, module)
        except AttributeError as exc:
            # surface a missing attribute as ImportError, matching the
            # module-not-found case (no `raise ... from` for Py2 compat)
            raise ImportError(str(exc))

    def require(self, env=None, installer=None):
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)

        # Get the requirements for this entry point with all its extras and
        # then resolve them. We have to pass `extras` along when resolving so
        # that the working set knows what extras we want. Otherwise, for
        # dist-info distributions, the working set will assume that the
        # requirements for that extra are purely optional and skip over them.
        reqs = self.dist.requires(self.extras)
        items = working_set.resolve(reqs, env, installer, extras=self.extras)
        list(map(working_set.add, items))

    # 'name = module:attr.path [extras]' -- attr and extras are optional
    pattern = re.compile(
        r'\s*'
        r'(?P<name>.+?)\s*'
        r'=\s*'
        r'(?P<module>[\w.]+)\s*'
        r'(:\s*(?P<attr>[\w.]+))?\s*'
        r'(?P<extras>\[.*\])?\s*$'
    )

    @classmethod
    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1, extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        m = cls.pattern.match(src)
        if not m:
            msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
            raise ValueError(msg, src)
        res = m.groupdict()
        extras = cls._parse_extras(res['extras'])
        attrs = res['attr'].split('.') if res['attr'] else ()
        return cls(res['name'], res['module'], attrs, extras, dist)

    @classmethod
    def _parse_extras(cls, extras_spec):
        if not extras_spec:
            return ()
        # reuse Requirement's extras parser by prefixing a dummy name;
        # any version specifier in the brackets is invalid here
        req = Requirement.parse('x' + extras_spec)
        if req.specs:
            raise ValueError()
        return req.extras

    @classmethod
    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        this = {}
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            if ep.name in this:
                raise ValueError("Duplicate entry point", group, ep.name)
            this[ep.name] = ep
        return this

    @classmethod
    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        # accept either a dict of group -> lines, or INI-style text/lines
        if isinstance(data, dict):
            data = data.items()
        else:
            data = split_sections(data)
        maps = {}
        for group, lines in data:
            if group is None:
                if not lines:
                    continue
                raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            if group in maps:
                raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)
        return maps

2537 

2538 

2539def _remove_md5_fragment(location): 

2540 if not location: 

2541 return '' 

2542 parsed = urllib.parse.urlparse(location) 

2543 if parsed[-1].startswith('md5='): 

2544 return urllib.parse.urlunparse(parsed[:-1] + ('',)) 

2545 return location 

2546 

2547 

def _version_from_file(lines):
    """
    Given an iterable of lines from a Metadata file, return
    the value of the Version field, if present, or None otherwise.
    """
    def is_version_line(line):
        return line.lower().startswith('version:')
    # take the first matching header line, or '' when none exists
    line = next(iter(filter(is_version_line, lines)), '')
    _, _, value = line.partition(':')
    return safe_version(value.strip()) or None

2559 

2560 

2561class Distribution: 

2562 """Wrap an actual or potential sys.path entry w/metadata""" 

2563 PKG_INFO = 'PKG-INFO' 

2564 

    def __init__(
            self, location=None, metadata=None, project_name=None,
            version=None, py_version=PY_MAJOR, platform=None,
            precedence=EGG_DIST):
        # fall back to 'Unknown' so every distribution has a usable name
        self.project_name = safe_name(project_name or 'Unknown')
        if version is not None:
            # when omitted, _version stays unset and the `version` property
            # lazily reads it from PKG-INFO via _get_version()
            self._version = safe_version(version)
        self.py_version = py_version
        self.platform = platform
        self.location = location
        self.precedence = precedence
        # metadata provider; defaults to the shared empty provider
        self._provider = metadata or empty_provider

2577 

2578 @classmethod 

2579 def from_location(cls, location, basename, metadata=None, **kw): 

2580 project_name, version, py_version, platform = [None] * 4 

2581 basename, ext = os.path.splitext(basename) 

2582 if ext.lower() in _distributionImpl: 

2583 cls = _distributionImpl[ext.lower()] 

2584 

2585 match = EGG_NAME(basename) 

2586 if match: 

2587 project_name, version, py_version, platform = match.group( 

2588 'name', 'ver', 'pyver', 'plat' 

2589 ) 

2590 return cls( 

2591 location, metadata, project_name=project_name, version=version, 

2592 py_version=py_version, platform=platform, **kw 

2593 )._reload_version() 

2594 

    def _reload_version(self):
        # Hook for subclasses that re-derive the version after construction;
        # the base class keeps whatever was parsed from the filename.
        return self

2597 

    @property
    def hashcmp(self):
        # Canonical tuple used by __hash__, __eq__ and the ordering dunders:
        # version first, then precedence, then identity-ish fields with
        # None normalized to '' so comparisons never mix None and str.
        return (
            self.parsed_version,
            self.precedence,
            self.key,
            _remove_md5_fragment(self.location),
            self.py_version or '',
            self.platform or '',
        )

2608 

    def __hash__(self):
        # consistent with __eq__, which also compares hashcmp tuples
        return hash(self.hashcmp)

2611 

    def __lt__(self, other):
        # ordering delegates to the hashcmp tuple
        return self.hashcmp < other.hashcmp

2614 

    def __le__(self, other):
        # ordering delegates to the hashcmp tuple
        return self.hashcmp <= other.hashcmp

2617 

    def __gt__(self, other):
        # ordering delegates to the hashcmp tuple
        return self.hashcmp > other.hashcmp

2620 

    def __ge__(self, other):
        # ordering delegates to the hashcmp tuple
        return self.hashcmp >= other.hashcmp

2623 

    def __eq__(self, other):
        # note: compares self.__class__, so instances only equal instances
        # of the very same (sub)class
        if not isinstance(other, self.__class__):
            # It's not a Distribution, so they are not equal
            return False
        return self.hashcmp == other.hashcmp

2629 

    def __ne__(self, other):
        # Python 2 does not derive != from __eq__; define it explicitly
        return not self == other

2632 

2633 # These properties have to be lazy so that we don't have to load any 

2634 # metadata until/unless it's actually needed. (i.e., some distributions 

2635 # may not know their name or version without loading PKG-INFO) 

2636 

    @property
    def key(self):
        # lowercased project name, computed once and cached; this is the
        # canonical dictionary key for looking up a distribution
        try:
            return self._key
        except AttributeError:
            self._key = key = self.project_name.lower()
            return key

2644 

    @property
    def parsed_version(self):
        # parsed lazily from `version` and cached on the instance
        if not hasattr(self, "_parsed_version"):
            self._parsed_version = parse_version(self.version)

        return self._parsed_version

2651 

    def _warn_legacy_version(self):
        # Emit a PEP440Warning when the version string failed to parse as
        # PEP 440 and fell back to LegacyVersion ordering.
        LV = packaging.version.LegacyVersion
        is_legacy = isinstance(self._parsed_version, LV)
        if not is_legacy:
            return

        # While an empty version is technically a legacy version and
        # is not a valid PEP 440 version, it's also unlikely to
        # actually come from someone and instead it is more likely that
        # it comes from setuptools attempting to parse a filename and
        # including it in the list. So for that we'll gate this warning
        # on if the version is anything at all or not.
        if not self.version:
            return

        tmpl = textwrap.dedent("""
            '{project_name} ({version})' is being parsed as a legacy,
            non PEP 440,
            version. You may find odd behavior and sort order.
            In particular it will be sorted as less than 0.0. It
            is recommended to migrate to PEP 440 compatible
            versions.
            """).strip().replace('\n', ' ')

        warnings.warn(tmpl.format(**vars(self)), PEP440Warning)

2677 

    @property
    def version(self):
        try:
            return self._version
        except AttributeError:
            # not supplied at construction; read it from PKG-INFO metadata
            version = self._get_version()
            if version is None:
                path = self._get_metadata_path_for_display(self.PKG_INFO)
                msg = (
                    "Missing 'Version:' header and/or {} file at path: {}"
                ).format(self.PKG_INFO, path)
                raise ValueError(msg, self)

            return version

2692 

    @property
    def _dep_map(self):
        """
        A map of extra to its list of (direct) requirements
        for this distribution, including the null extra.
        """
        try:
            return self.__dep_map
        except AttributeError:
            # computed once from requires.txt/depends.txt, then cached
            self.__dep_map = self._filter_extras(self._build_dep_map())
        return self.__dep_map

2704 

    @staticmethod
    def _filter_extras(dm):
        """
        Given a mapping of extras to dependencies, strip off
        environment markers and filter out any dependencies
        not matching the markers.
        """
        # iterate over a snapshot of the truthy keys; dm is mutated below
        for extra in list(filter(None, dm)):
            new_extra = extra
            reqs = dm.pop(extra)
            # extras may be spelled 'extra:marker'
            new_extra, _, marker = extra.partition(':')
            fails_marker = marker and (
                invalid_marker(marker)
                or not evaluate_marker(marker)
            )
            if fails_marker:
                # requirements whose marker does not apply are dropped
                reqs = []
            new_extra = safe_extra(new_extra) or None

            dm.setdefault(new_extra, []).extend(reqs)
        return dm

2726 

    def _build_dep_map(self):
        # both requires.txt and the legacy depends.txt contribute; each
        # section header names an extra (None for the unconditional set)
        dm = {}
        for name in 'requires.txt', 'depends.txt':
            for extra, reqs in split_sections(self._get_metadata(name)):
                dm.setdefault(extra, []).extend(parse_requirements(reqs))
        return dm

2733 

    def requires(self, extras=()):
        """List of Requirements needed for this distro if `extras` are used"""
        dm = self._dep_map
        deps = []
        # unconditional requirements live under the None key
        deps.extend(dm.get(None, ()))
        for ext in extras:
            try:
                deps.extend(dm[safe_extra(ext)])
            except KeyError:
                raise UnknownExtra(
                    "%s has no such extra feature %r" % (self, ext)
                )
        return deps

2747 

    def _get_metadata_path_for_display(self, name):
        """
        Return the path to the given metadata file, if available.
        """
        try:
            # We need to access _get_metadata_path() on the provider object
            # directly rather than through this class's __getattr__()
            # since _get_metadata_path() is marked private.
            path = self._provider._get_metadata_path(name)

        # Handle exceptions e.g. in case the distribution's metadata
        # provider doesn't support _get_metadata_path().
        except Exception:
            # display-only: never let metadata introspection raise
            return '[could not detect]'

        return path

2764 

    def _get_metadata(self, name):
        # yields nothing at all when the metadata file is absent
        if self.has_metadata(name):
            for line in self.get_metadata_lines(name):
                yield line

2769 

    def _get_version(self):
        # read the 'Version:' header from PKG-INFO; None when absent
        lines = self._get_metadata(self.PKG_INFO)
        version = _version_from_file(lines)

        return version

2775 

    def activate(self, path=None, replace=False):
        """Ensure distribution is importable on `path` (default=sys.path)"""
        if path is None:
            path = sys.path
        self.insert_on(path, replace=replace)
        if path is sys.path:
            # keep namespace-package bookkeeping in sync with the new entry
            fixup_namespace_packages(self.location)
            for pkg in self._get_metadata('namespace_packages.txt'):
                if pkg in sys.modules:
                    declare_namespace(pkg)

2786 

    def egg_name(self):
        """Return what this distribution's standard .egg filename should be"""
        # name-version-pyX.Y[-platform], with unsafe characters filtered
        # by to_filename()
        filename = "%s-%s-py%s" % (
            to_filename(self.project_name), to_filename(self.version),
            self.py_version or PY_MAJOR
        )

        if self.platform:
            filename += '-' + self.platform
        return filename

2797 

    def __repr__(self):
        # include the location when known, e.g. "foo 1.0 (/path/to/foo)"
        if self.location:
            return "%s (%s)" % (self, self.location)
        else:
            return str(self)

2803 

    def __str__(self):
        try:
            version = getattr(self, 'version', None)
        except ValueError:
            # the version property raises ValueError when metadata exists
            # but carries no usable version
            version = None
        version = version or "[unknown version]"
        return "%s %s" % (self.project_name, version)

2811 

    def __getattr__(self, attr):
        """Delegate all unrecognized public attributes to .metadata provider"""
        if attr.startswith('_'):
            # never proxy private/dunder lookups to the provider
            raise AttributeError(attr)
        return getattr(self._provider, attr)

2817 

    def __dir__(self):
        # merge our own attributes with the provider's public ones so the
        # delegation performed by __getattr__ is discoverable
        return list(
            set(super(Distribution, self).__dir__())
            | set(
                attr for attr in self._provider.__dir__()
                if not attr.startswith('_')
            )
        )

    if not hasattr(object, '__dir__'):
        # python 2.7 not supported
        del __dir__

2830 

    @classmethod
    def from_filename(cls, filename, metadata=None, **kw):
        # convenience wrapper around from_location(): normalize the path
        # and split off the basename
        return cls.from_location(
            _normalize_cached(filename), os.path.basename(filename), metadata,
            **kw
        )

2837 

    def as_requirement(self):
        """Return a ``Requirement`` that matches this distribution exactly"""
        if isinstance(self.parsed_version, packaging.version.Version):
            spec = "%s==%s" % (self.project_name, self.parsed_version)
        else:
            # legacy (non-PEP 440) versions need arbitrary equality '==='
            spec = "%s===%s" % (self.project_name, self.parsed_version)

        return Requirement.parse(spec)

2846 

    def load_entry_point(self, group, name):
        """Return the `name` entry point of `group` or raise ImportError"""
        ep = self.get_entry_info(group, name)
        if ep is None:
            raise ImportError("Entry point %r not found" % ((group, name),))
        return ep.load()

2853 

    def get_entry_map(self, group=None):
        """Return the entry point map for `group`, or the full entry map"""
        try:
            ep_map = self._ep_map
        except AttributeError:
            # parsed lazily from entry_points.txt and cached
            ep_map = self._ep_map = EntryPoint.parse_map(
                self._get_metadata('entry_points.txt'), self
            )
        if group is not None:
            return ep_map.get(group, {})
        return ep_map

2865 

    def get_entry_info(self, group, name):
        """Return the EntryPoint object for `group`+`name`, or ``None``"""
        return self.get_entry_map(group).get(name)

2869 

    def insert_on(self, path, loc=None, replace=False):
        """Ensure self.location is on path

        If replace=False (default):
            - If location is already in path anywhere, do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent.
              - Else: add to the end of path.
        If replace=True:
            - If location is already on path anywhere (not eggs)
              or higher priority than its parent (eggs)
              do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent,
                removing any lower-priority entries.
              - Else: add it to the front of path.
        """

        loc = loc or self.location
        if not loc:
            return

        nloc = _normalize_cached(loc)
        bdir = os.path.dirname(nloc)
        # npath mirrors `path` with normalized entries for comparison;
        # both lists are kept in sync when mutated below
        npath = [(p and _normalize_cached(p) or p) for p in path]

        for p, item in enumerate(npath):
            if item == nloc:
                if replace:
                    break
                else:
                    # don't modify path (even removing duplicates) if
                    # found and not replace
                    return
            elif item == bdir and self.precedence == EGG_DIST:
                # if it's an .egg, give it precedence over its directory
                # UNLESS it's already been added to sys.path and replace=False
                if (not replace) and nloc in npath[p:]:
                    return
                if path is sys.path:
                    self.check_version_conflict()
                path.insert(p, loc)
                npath.insert(p, nloc)
                break
        else:
            # location not found and no egg-parent entry on path
            if path is sys.path:
                self.check_version_conflict()
            if replace:
                path.insert(0, loc)
            else:
                path.append(loc)
            return

        # p is the spot where we found or inserted loc; now remove duplicates
        while True:
            try:
                np = npath.index(nloc, p + 1)
            except ValueError:
                break
            else:
                del npath[np], path[np]
                # ha!
                p = np

        return

2937 

    def check_version_conflict(self):
        """Warn if a top-level module of this distribution was already
        imported from a different location than this distribution's."""
        if self.key == 'setuptools':
            # ignore the inevitable setuptools self-conflicts :(
            return

        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
        loc = normalize_path(self.location)
        for modname in self._get_metadata('top_level.txt'):
            # Skip modules not yet imported, declared namespace packages,
            # and modules registered in the namespace-package machinery.
            if (modname not in sys.modules or modname in nsp
                    or modname in _namespace_packages):
                continue
            if modname in ('pkg_resources', 'setuptools', 'site'):
                continue
            fn = getattr(sys.modules[modname], '__file__', None)
            if fn and (normalize_path(fn).startswith(loc) or
                    fn.startswith(self.location)):
                # Already imported from within this distribution -- fine.
                continue
            issue_warning(
                "Module %s was already imported from %s, but %s is being added"
                " to sys.path" % (modname, fn, self.location),
            )

2959 

2960 def has_version(self): 

2961 try: 

2962 self.version 

2963 except ValueError: 

2964 issue_warning("Unbuilt egg for " + repr(self)) 

2965 return False 

2966 return True 

2967 

2968 def clone(self, **kw): 

2969 """Copy this distribution, substituting in any changed keyword args""" 

2970 names = 'project_name version py_version platform location precedence' 

2971 for attr in names.split(): 

2972 kw.setdefault(attr, getattr(self, attr, None)) 

2973 kw.setdefault('metadata', self._provider) 

2974 return self.__class__(**kw) 

2975 

2976 @property 

2977 def extras(self): 

2978 return [dep for dep in self._dep_map if dep] 

2979 

2980 

class EggInfoDistribution(Distribution):
    def _reload_version(self):
        """
        Prefer the version recorded in the metadata file over the one
        derived from the filename.

        distutils-installed packages (e.g. numpy or scipy) used an old
        ``safe_version``, so their version numbers can get mangled when
        encoded into filenames (e.g. ``1.11.0.dev0+2329eae`` becomes
        ``1.11.0.dev0_2329eae``) and would not parse properly downstream
        by ``Distribution`` and ``safe_version``; reading the version
        from the metadata itself sidesteps that.
        """
        metadata_version = self._get_version()
        if metadata_version:
            self._version = metadata_version
        return self

2998 

2999 

class DistInfoDistribution(Distribution):
    """
    Wrap an actual or potential sys.path entry
    w/metadata, .dist-info style.
    """
    PKG_INFO = 'METADATA'
    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")

    @property
    def _parsed_pkg_info(self):
        """Parse and cache metadata"""
        try:
            return self._pkg_info
        except AttributeError:
            raw = self.get_metadata(self.PKG_INFO)
            self._pkg_info = email.parser.Parser().parsestr(raw)
            return self._pkg_info

    @property
    def _dep_map(self):
        # Cached computation of the {extra: [requirements]} mapping.
        try:
            return self.__dep_map
        except AttributeError:
            self.__dep_map = self._compute_dependencies()
            return self.__dep_map

    def _compute_dependencies(self):
        """Recompute this distribution's dependencies."""
        dm = self.__dep_map = {None: []}

        # Gather every Requires-Dist entry, markers included.
        reqs = []
        for req_line in self._parsed_pkg_info.get_all('Requires-Dist') or []:
            reqs.extend(parse_requirements(req_line))

        def reqs_for_extra(extra):
            # Requirements whose marker is absent or satisfied for `extra`.
            return [
                req for req in reqs
                if not req.marker or req.marker.evaluate({'extra': extra})
            ]

        common = frozenset(reqs_for_extra(None))
        dm[None].extend(common)

        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
            s_extra = safe_extra(extra.strip())
            # Only the requirements an extra adds beyond the common set.
            dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common)

        return dm

3048 

3049 

# Map a metadata-directory/file extension to the Distribution subclass
# that knows how to read it.
_distributionImpl = {
    '.egg': Distribution,
    '.egg-info': EggInfoDistribution,
    '.dist-info': DistInfoDistribution,
}

3055 

3056 

def issue_warning(*args, **kw):
    """Emit a warning attributed to the first caller outside this module.

    Walks up the call stack past every frame whose globals are this
    module's, then calls ``warnings.warn`` with a matching stacklevel so
    the warning points at user code rather than pkg_resources.
    """
    level = 1
    this_module_globals = globals()
    try:
        # find the first stack frame that is *not* code in
        # the pkg_resources module, to use for the warning
        while sys._getframe(level).f_globals is this_module_globals:
            level += 1
    except ValueError:
        # Walked off the top of the stack; use however far we got.
        pass
    warnings.warn(stacklevel=level + 1, *args, **kw)

3068 

3069 

class RequirementParseError(ValueError):
    """Raised when a requirement string cannot be parsed."""

    def __str__(self):
        # Render all constructor args as one space-separated message.
        return ' '.join(self.args)

3073 

3074 

def parse_requirements(strs):
    """Yield ``Requirement`` objects for each specification in `strs`

    `strs` must be a string, or a (possibly-nested) iterable thereof.
    """
    # A steppable iterator lets a backslash continuation consume the
    # following line.
    lines = iter(yield_lines(strs))

    for line in lines:
        # Drop comments -- a hash without a space may be in a URL.
        comment_pos = line.find(' #')
        if comment_pos != -1:
            line = line[:comment_pos]
        # A trailing backslash is a continuation: strip it and splice in
        # the next line (or stop if there isn't one).
        if line.endswith('\\'):
            line = line[:-2].strip()
            try:
                line += next(lines)
            except StopIteration:
                return
        yield Requirement(line)

3095 

3096 

class Requirement(packaging.requirements.Requirement):
    def __init__(self, requirement_string):
        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
        try:
            super(Requirement, self).__init__(requirement_string)
        except packaging.requirements.InvalidRequirement as e:
            raise RequirementParseError(str(e))
        # Legacy attributes kept for backward compatibility with older
        # pkg_resources consumers.
        self.unsafe_name = self.name
        project_name = safe_name(self.name)
        self.project_name = project_name
        self.key = project_name.lower()
        self.specs = [
            (spec.operator, spec.version) for spec in self.specifier
        ]
        self.extras = tuple(safe_extra(extra) for extra in self.extras)
        # Everything equality/hashing should consider, precomputed once.
        self.hashCmp = (
            self.key,
            self.specifier,
            frozenset(self.extras),
            str(self.marker) if self.marker else None,
        )
        self.__hash = hash(self.hashCmp)

    def __eq__(self, other):
        if not isinstance(other, Requirement):
            return False
        return self.hashCmp == other.hashCmp

    def __ne__(self, other):
        return not self == other

    def __contains__(self, item):
        if isinstance(item, Distribution):
            if item.key != self.key:
                return False
            item = item.version

        # Allow prereleases always in order to match the previous behavior of
        # this method. In the future this should be smarter and follow PEP 440
        # more accurately.
        return self.specifier.contains(item, prereleases=True)

    def __hash__(self):
        return self.__hash

    def __repr__(self):
        return "Requirement.parse(%r)" % str(self)

    @staticmethod
    def parse(s):
        req, = parse_requirements(s)
        return req

3149 

3150 

3151def _always_object(classes): 

3152 """ 

3153 Ensure object appears in the mro even 

3154 for old-style classes. 

3155 """ 

3156 if object not in classes: 

3157 return classes + (object,) 

3158 return classes 

3159 

3160 

def _find_adapter(registry, ob):
    """Return an adapter factory for `ob` from `registry`"""
    # Search the MRO (with `object` guaranteed present) for the most
    # specific registered type; None if nothing matches.
    klass = getattr(ob, '__class__', type(ob))
    for candidate in _always_object(inspect.getmro(klass)):
        if candidate in registry:
            return registry[candidate]

3167 

3168 

def ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    dirname = os.path.dirname(path)
    # py31compat.makedirs presumably mirrors os.makedirs with exist_ok
    # support for older interpreters -- TODO confirm against py31compat.
    py31compat.makedirs(dirname, exist_ok=True)

3173 

3174 

def _bypass_ensure_directory(path):
    """Sandbox-bypassing version of ensure_directory()"""
    if not WRITE_SUPPORT:
        raise IOError('"os.mkdir" not supported on this platform.')
    # ``split``/``isdir``/``mkdir`` are module-level captures
    # (``mkdir`` captured from os at import time to bypass sandboxing;
    # split/isdir presumably likewise -- defined earlier in this file).
    dirname, filename = split(path)
    if dirname and filename and not isdir(dirname):
        # Recursively create the parent first (makedirs equivalent).
        _bypass_ensure_directory(dirname)
        try:
            mkdir(dirname, 0o755)
        except FileExistsError:
            # Another caller may have created it between isdir and mkdir.
            pass

3186 

3187 

def split_sections(s):
    """Split a string or iterable thereof into (section, content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines. If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    current_section = None
    current_content = []
    for line in yield_lines(s):
        if not line.startswith("["):
            current_content.append(line)
            continue
        if not line.endswith("]"):
            raise ValueError("Invalid section heading", line)
        # New header: flush the previous segment (unless it's the empty
        # leading one) and start collecting under the new section name.
        if current_section or current_content:
            yield current_section, current_content
        current_section = line[1:-1].strip()
        current_content = []

    # wrap up last segment
    yield current_section, current_content

3212 

3213 

def _mkstemp(*args, **kw):
    """tempfile.mkstemp with sandboxing temporarily disabled.

    Swaps ``os.open`` for ``os_open`` (presumably the real os.open
    captured before the sandbox hooks it -- defined earlier in this
    file) for the duration of the call, restoring it afterwards.
    """
    old_open = os.open
    try:
        # temporarily bypass sandboxing
        os.open = os_open
        return tempfile.mkstemp(*args, **kw)
    finally:
        # and then put it back
        os.open = old_open

3223 

3224 

# Silence the PEP440Warning by default, so that end users don't get hit by it
# randomly just because they use pkg_resources. We want to append the rule
# because we want earlier uses of filterwarnings to take precedence over this
# one.  (Runs once at import time.)
warnings.filterwarnings("ignore", category=PEP440Warning, append=True)

3230 

3231 

3232# from jaraco.functools 1.3 

3233def _call_aside(f, *args, **kwargs): 

3234 f(*args, **kwargs) 

3235 return f 

3236 

3237 

# Executed once at import time via the ``_call_aside`` decorator.
@_call_aside
def _initialize(g=globals()):
    "Set up global resource manager (deliberately not state-saved)"
    manager = ResourceManager()
    g['_manager'] = manager
    # Re-export every public ResourceManager method as a module-level
    # function (resource_filename, resource_string, etc.).
    g.update(
        (name, getattr(manager, name))
        for name in dir(manager)
        if not name.startswith('_')
    )

3248 

3249 

@_call_aside
def _initialize_master_working_set():
    """
    Prepare the master working set and make the ``require()``
    API available.

    This function has explicit effects on the global state
    of pkg_resources. It is intended to be invoked once at
    the initialization of this module.

    Invocation by other packages is unsupported and done
    at their own risk.
    """
    working_set = WorkingSet._build_master()
    _declare_state('object', working_set=working_set)

    # These locals become module-level names via the
    # ``globals().update(locals())`` call at the end.
    require = working_set.require
    iter_entry_points = working_set.iter_entry_points
    add_activation_listener = working_set.subscribe
    run_script = working_set.run_script
    # backward compatibility
    run_main = run_script
    # Activate all distributions already on sys.path with replace=False and
    # ensure that all distributions added to the working set in the future
    # (e.g. by calling ``require()``) will get activated as well,
    # with higher priority (replace=True).
    tuple(
        dist.activate(replace=False)
        for dist in working_set
    )
    add_activation_listener(
        lambda dist: dist.activate(replace=True),
        existing=False,
    )
    working_set.entries = []
    # match order
    list(map(working_set.add_entry, sys.path))
    globals().update(locals())

3288 

class PkgResourcesDeprecationWarning(Warning):
    """
    Base class for warnings about deprecated behavior in ``pkg_resources``.

    Deliberately derived from ``Warning`` rather than
    ``DeprecationWarning`` so that these warnings are visible by default.
    """