Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/pip/_vendor/distlib/util.py: 16%

1266 statements  

« prev     ^ index     » next       coverage.py v7.4.3, created at 2024-02-26 06:33 +0000

1# 

2# Copyright (C) 2012-2023 The Python Software Foundation. 

3# See LICENSE.txt and CONTRIBUTORS.txt. 

4# 

5import codecs 

6from collections import deque 

7import contextlib 

8import csv 

9from glob import iglob as std_iglob 

10import io 

11import json 

12import logging 

13import os 

14import py_compile 

15import re 

16import socket 

17try: 

18 import ssl 

19except ImportError: # pragma: no cover 

20 ssl = None 

21import subprocess 

22import sys 

23import tarfile 

24import tempfile 

25import textwrap 

26 

27try: 

28 import threading 

29except ImportError: # pragma: no cover 

30 import dummy_threading as threading 

31import time 

32 

33from . import DistlibException 

34from .compat import (string_types, text_type, shutil, raw_input, StringIO, 

35 cache_from_source, urlopen, urljoin, httplib, xmlrpclib, 

36 HTTPHandler, BaseConfigurator, valid_ident, 

37 Container, configparser, URLError, ZipFile, fsdecode, 

38 unquote, urlparse) 

39 

logger = logging.getLogger(__name__)

#
# Requirement parsing code as per PEP 508
#

# A package or marker-variable name, e.g. 'foo-bar' or 'os_name'.
IDENTIFIER = re.compile(r'^([\w\.-]+)\s*')
# A version token; may contain wildcards ('*') and '+' local-version parts.
VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*')
# Operators allowed in version constraints (<, <=, >, >=, ==, ===, ~=, !=).
COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*')
# Operators allowed in marker expressions (adds 'in' / 'not in').
MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*')
OR = re.compile(r'^or\b\s*')
AND = re.compile(r'^and\b\s*')
# A run of non-whitespace (used to grab a URL after '@').
NON_SPACE = re.compile(r'(\S+)\s*')
# Characters permitted inside a quoted string literal in a marker.
STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)')

54 

55 

def parse_marker(marker_string):
    """
    Parse a marker string and return a dictionary containing a marker expression.

    The dictionary will contain keys "op", "lhs" and "rhs" for non-terminals in
    the expression grammar, or strings. A string contained in quotes is to be
    interpreted as a literal string, and a string not contained in quotes is a
    variable (such as os_name).

    Raises SyntaxError on malformed input. The return value is a tuple of
    (expression, remaining-unparsed-text).
    """

    # Grammar (recursive descent, lowest precedence first):
    #   marker      := marker_and ('or' marker_and)*
    #   marker_and  := marker_expr ('and' marker_expr)*
    #   marker_expr := '(' marker ')' | marker_var (MARKER_OP marker_var)*
    #   marker_var  := IDENTIFIER | quoted string literal

    def marker_var(remaining):
        # either identifier, or literal string
        m = IDENTIFIER.match(remaining)
        if m:
            result = m.groups()[0]
            remaining = remaining[m.end():]
        elif not remaining:
            raise SyntaxError('unexpected end of input')
        else:
            q = remaining[0]
            if q not in '\'"':
                raise SyntaxError('invalid expression: %s' % remaining)
            # oq is the "other" quote character, allowed unescaped inside
            # a literal delimited by q.
            oq = '\'"'.replace(q, '')
            remaining = remaining[1:]
            parts = [q]
            while remaining:
                # either a string chunk, or oq, or q to terminate
                if remaining[0] == q:
                    break
                elif remaining[0] == oq:
                    parts.append(oq)
                    remaining = remaining[1:]
                else:
                    m = STRING_CHUNK.match(remaining)
                    if not m:
                        raise SyntaxError('error in string literal: %s' %
                                          remaining)
                    parts.append(m.groups()[0])
                    remaining = remaining[m.end():]
            else:
                # while-loop exhausted input without hitting the closing quote
                s = ''.join(parts)
                raise SyntaxError('unterminated string: %s' % s)
            parts.append(q)
            result = ''.join(parts)
            remaining = remaining[1:].lstrip()  # skip past closing quote
        return result, remaining

    def marker_expr(remaining):
        # Parenthesised sub-expression, or a chain of comparisons.
        if remaining and remaining[0] == '(':
            result, remaining = marker(remaining[1:].lstrip())
            if remaining[0] != ')':
                raise SyntaxError('unterminated parenthesis: %s' % remaining)
            remaining = remaining[1:].lstrip()
        else:
            lhs, remaining = marker_var(remaining)
            while remaining:
                m = MARKER_OP.match(remaining)
                if not m:
                    break
                op = m.groups()[0]
                remaining = remaining[m.end():]
                rhs, remaining = marker_var(remaining)
                # Left-associative: fold each comparison into the lhs.
                lhs = {'op': op, 'lhs': lhs, 'rhs': rhs}
            result = lhs
        return result, remaining

    def marker_and(remaining):
        # One or more marker_exprs joined by 'and' (binds tighter than 'or').
        lhs, remaining = marker_expr(remaining)
        while remaining:
            m = AND.match(remaining)
            if not m:
                break
            remaining = remaining[m.end():]
            rhs, remaining = marker_expr(remaining)
            lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs}
        return lhs, remaining

    def marker(remaining):
        # Top level: one or more marker_ands joined by 'or'.
        lhs, remaining = marker_and(remaining)
        while remaining:
            m = OR.match(remaining)
            if not m:
                break
            remaining = remaining[m.end():]
            rhs, remaining = marker_and(remaining)
            lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs}
        return lhs, remaining

    return marker(marker_string)

145 

146 

def parse_requirement(req):
    """
    Parse a requirement passed in as a string. Return a Container
    whose attributes contain the various parts of the requirement.

    The Container has attributes: name, extras, constraints, marker, url
    and requirement (a normalised 'name op version, ...' string). Returns
    None for blank lines and comment lines; raises SyntaxError on
    malformed input.
    """
    remaining = req.strip()
    # Blank lines and comments yield no requirement.
    if not remaining or remaining.startswith('#'):
        return None
    m = IDENTIFIER.match(remaining)
    if not m:
        raise SyntaxError('name expected: %s' % remaining)
    distname = m.groups()[0]
    remaining = remaining[m.end():]
    extras = mark_expr = versions = uri = None
    # Optional '[extra1, extra2, ...]' immediately after the name.
    if remaining and remaining[0] == '[':
        i = remaining.find(']', 1)
        if i < 0:
            raise SyntaxError('unterminated extra: %s' % remaining)
        s = remaining[1:i]
        remaining = remaining[i + 1:].lstrip()
        extras = []
        while s:
            m = IDENTIFIER.match(s)
            if not m:
                raise SyntaxError('malformed extra: %s' % s)
            extras.append(m.groups()[0])
            s = s[m.end():]
            if not s:
                break
            if s[0] != ',':
                raise SyntaxError('comma expected in extras: %s' % s)
            s = s[1:].lstrip()
        if not extras:
            extras = None
    if remaining:
        if remaining[0] == '@':
            # it's a URI
            remaining = remaining[1:].lstrip()
            m = NON_SPACE.match(remaining)
            if not m:
                raise SyntaxError('invalid URI: %s' % remaining)
            uri = m.groups()[0]
            t = urlparse(uri)
            # there are issues with Python and URL parsing, so this test
            # is a bit crude. See bpo-20271, bpo-23505. Python doesn't
            # always parse invalid URLs correctly - it should raise
            # exceptions for malformed URLs
            if not (t.scheme and t.netloc):
                raise SyntaxError('Invalid URL: %s' % uri)
            remaining = remaining[m.end():].lstrip()
        else:

            def get_versions(ver_remaining):
                """
                Return a list of operator, version tuples if any are
                specified, else None.
                """
                m = COMPARE_OP.match(ver_remaining)
                versions = None
                if m:
                    versions = []
                    while True:
                        op = m.groups()[0]
                        ver_remaining = ver_remaining[m.end():]
                        m = VERSION_IDENTIFIER.match(ver_remaining)
                        if not m:
                            raise SyntaxError('invalid version: %s' %
                                              ver_remaining)
                        v = m.groups()[0]
                        versions.append((op, v))
                        ver_remaining = ver_remaining[m.end():]
                        if not ver_remaining or ver_remaining[0] != ',':
                            break
                        ver_remaining = ver_remaining[1:].lstrip()
                        # Some packages have a trailing comma which would
                        # break things. See issue #148.
                        if not ver_remaining:
                            break
                        m = COMPARE_OP.match(ver_remaining)
                        if not m:
                            raise SyntaxError('invalid constraint: %s' %
                                              ver_remaining)
                    if not versions:
                        versions = None
                return versions, ver_remaining

            if remaining[0] != '(':
                versions, remaining = get_versions(remaining)
            else:
                i = remaining.find(')', 1)
                if i < 0:
                    raise SyntaxError('unterminated parenthesis: %s' %
                                      remaining)
                s = remaining[1:i]
                remaining = remaining[i + 1:].lstrip()
                # As a special diversion from PEP 508, allow a version number
                # a.b.c in parentheses as a synonym for ~= a.b.c (because this
                # is allowed in earlier PEPs)
                if COMPARE_OP.match(s):
                    versions, _ = get_versions(s)
                else:
                    m = VERSION_IDENTIFIER.match(s)
                    if not m:
                        raise SyntaxError('invalid constraint: %s' % s)
                    v = m.groups()[0]
                    s = s[m.end():].lstrip()
                    if s:
                        raise SyntaxError('invalid constraint: %s' % s)
                    versions = [('~=', v)]

    # Optional environment marker after ';'.
    if remaining:
        if remaining[0] != ';':
            raise SyntaxError('invalid requirement: %s' % remaining)
        remaining = remaining[1:].lstrip()

        mark_expr, remaining = parse_marker(remaining)

    # Anything left must be a trailing comment.
    if remaining and remaining[0] != '#':
        raise SyntaxError('unexpected trailing data: %s' % remaining)

    if not versions:
        rs = distname
    else:
        rs = '%s %s' % (distname, ', '.join(
            ['%s %s' % con for con in versions]))
    return Container(name=distname,
                     extras=extras,
                     constraints=versions,
                     marker=mark_expr,
                     url=uri,
                     requirement=rs)

278 

279 

def get_resources_dests(resources_root, rules):
    """Find destinations for resources files.

    Each rule is a (base, suffix, dest) triple; base and suffix are glob
    patterns relative to resources_root, and dest is the destination prefix
    (or None, meaning "remove any previously matched entry"). Returns a
    mapping of resource file (relative to resources_root) to destination.
    """

    def _relative(root, path):
        # Normalise both to '/'-separated form and strip the root prefix.
        root = root.replace(os.path.sep, '/')
        path = path.replace(os.path.sep, '/')
        assert path.startswith(root)
        return path[len(root):].lstrip('/')

    destinations = {}
    for base, suffix, dest in rules:
        for abs_base in iglob(os.path.join(resources_root, base)):
            for abs_path in iglob(os.path.join(abs_base, suffix)):
                resource_file = _relative(resources_root, abs_path)
                if dest is None:
                    # A None destination cancels any earlier match.
                    destinations.pop(resource_file, None)
                    continue
                rel_path = _relative(abs_base, abs_path)
                rel_dest = dest.replace(os.path.sep, '/').rstrip('/')
                destinations[resource_file] = rel_dest + '/' + rel_path
    return destinations

304 

305 

def in_venv():
    """Return True if running inside a virtual environment."""
    # Old-style virtualenv exposes sys.real_prefix; PEP 405 venvs are
    # detected by base_prefix differing from prefix.
    if hasattr(sys, 'real_prefix'):
        return True
    return sys.prefix != getattr(sys, 'base_prefix', sys.prefix)

314 

315 

def get_executable():
    """Return the path of the running Python interpreter as text.

    Historical workarounds (the __PYVENV_LAUNCHER__ dance on OS X, and
    normcasing -- see issue #143) are no longer applied; sys.executable is
    used as-is, decoded to text if necessary.
    """
    result = sys.executable
    if isinstance(result, text_type):
        return result
    return fsdecode(result)

332 

333 

def proceed(prompt, allowed_chars, error_prompt=None, default=None):
    """Prompt the user until a response starting with one of allowed_chars
    is given; return that character, lowercased.

    An empty response falls back to default (if any); unacceptable input
    re-prompts, optionally prefixed with error_prompt.
    """
    p = prompt
    while True:
        response = raw_input(p)
        p = prompt
        if not response and default:
            response = default
        if response:
            c = response[0].lower()
            if c in allowed_chars:
                return c
            if error_prompt:
                p = '%c: %s\n%s' % (c, error_prompt, prompt)

348 

349 

def extract_by_key(d, keys):
    """Return a new dict with only the entries of d whose key appears in
    keys (a sequence of keys, or a whitespace-separated string of them)."""
    if isinstance(keys, string_types):
        keys = keys.split()
    return {key: d[key] for key in keys if key in d}

358 

359 

def read_exports(stream):
    """Read an exports mapping from a stream.

    Tries JSON first (the 'python.exports' extension layout), then falls
    back to legacy INI format. Returns a dict mapping group name to a dict
    of name -> ExportEntry.
    """
    if sys.version_info[0] >= 3:
        # needs to be a text stream
        stream = codecs.getreader('utf-8')(stream)
    # Try to load as JSON, falling back on legacy format
    data = stream.read()
    stream = StringIO(data)
    try:
        jdata = json.load(stream)
        result = jdata['extensions']['python.exports']['exports']
        # Replace the raw 'name = value' strings with ExportEntry objects,
        # mutating the entry dicts in place.
        for group, entries in result.items():
            for k, v in entries.items():
                s = '%s = %s' % (k, v)
                entry = get_export_entry(s)
                assert entry is not None
                entries[k] = entry
        return result
    except Exception:
        # Not JSON (or wrong shape): rewind and parse as INI below.
        stream.seek(0, 0)

    def read_stream(cp, stream):
        # configparser API changed name across Python versions.
        if hasattr(cp, 'read_file'):
            cp.read_file(stream)
        else:
            cp.readfp(stream)

    cp = configparser.ConfigParser()
    try:
        read_stream(cp, stream)
    except configparser.MissingSectionHeaderError:
        # Possibly an indented legacy file: dedent and retry once.
        stream.close()
        data = textwrap.dedent(data)
        stream = StringIO(data)
        read_stream(cp, stream)

    result = {}
    for key in cp.sections():
        result[key] = entries = {}
        for name, value in cp.items(key):
            s = '%s = %s' % (name, value)
            entry = get_export_entry(s)
            assert entry is not None
            # entry.dist = self
            entries[name] = entry
    return result

405 

406 

def write_exports(exports, stream):
    """Write an exports mapping (group -> {name: ExportEntry}) to a byte
    stream in INI format."""
    if sys.version_info[0] >= 3:
        # configparser needs a text stream; wrap the byte stream.
        stream = codecs.getwriter('utf-8')(stream)
    cp = configparser.ConfigParser()
    for section, entries in exports.items():
        # TODO check k, v for valid values
        cp.add_section(section)
        for entry in entries.values():
            if entry.suffix is None:
                value = entry.prefix
            else:
                value = '%s:%s' % (entry.prefix, entry.suffix)
            if entry.flags:
                value = '%s [%s]' % (value, ', '.join(entry.flags))
            cp.set(section, entry.name, value)
    cp.write(stream)

424 

425 

@contextlib.contextmanager
def tempdir():
    """Context manager yielding a fresh temporary directory, which is
    removed (with all its contents) on exit."""
    path = tempfile.mkdtemp()
    try:
        yield path
    finally:
        shutil.rmtree(path)

433 

434 

@contextlib.contextmanager
def chdir(d):
    """Context manager which makes d the working directory, restoring the
    previous working directory on exit."""
    saved = os.getcwd()
    try:
        os.chdir(d)
        yield
    finally:
        os.chdir(saved)

443 

444 

@contextlib.contextmanager
def socket_timeout(seconds=15):
    """Context manager which temporarily sets the global default socket
    timeout, restoring the previous value on exit."""
    saved = socket.getdefaulttimeout()
    try:
        socket.setdefaulttimeout(seconds)
        yield
    finally:
        socket.setdefaulttimeout(saved)

453 

454 

class cached_property(object):
    """Descriptor which computes a value once per instance and then caches
    it as an ordinary instance attribute of the same name, so later
    lookups bypass the descriptor entirely."""

    def __init__(self, func):
        self.func = func

    def __get__(self, obj, cls=None):
        # Class-level access returns the descriptor itself.
        if obj is None:
            return self
        result = self.func(obj)
        # Shadow the descriptor on the instance; object.__setattr__ is used
        # so a custom __setattr__ on the class cannot interfere.
        object.__setattr__(obj, self.func.__name__, result)
        return result

469 

470 

def convert_path(pathname):
    """Return 'pathname' as a name that will work on the native filesystem.

    The path is split on '/' and put back together again using the current
    directory separator. Needed because filenames in the setup script are
    always supplied in Unix style, and have to be converted to the local
    convention before we can actually use them in the filesystem. Raises
    ValueError on non-Unix-ish systems if 'pathname' either starts or
    ends with a slash.
    """
    if os.sep == '/':
        # Already in native form.
        return pathname
    if not pathname:
        return pathname
    if pathname.startswith('/'):
        raise ValueError("path '%s' cannot be absolute" % pathname)
    if pathname.endswith('/'):
        raise ValueError("path '%s' cannot end with '/'" % pathname)

    # Drop any '.' components before re-joining with the native separator.
    parts = [p for p in pathname.split('/') if p != os.curdir]
    if not parts:
        return os.curdir
    return os.path.join(*parts)

496 

497 

class FileOperator(object):
    """
    Performs filesystem operations (copying, writing, byte-compiling,
    removing) with support for a dry-run mode and optional recording of
    created files/directories so changes can be committed or rolled back.
    """

    def __init__(self, dry_run=False):
        # When dry_run is True, operations are logged but not performed.
        self.dry_run = dry_run
        # Directories already known to exist (or already created by us).
        self.ensured = set()
        self._init_record()

    def _init_record(self):
        # Reset change tracking; recording stays off until enabled by a caller.
        self.record = False
        self.files_written = set()
        self.dirs_created = set()

    def record_as_written(self, path):
        # Track a written file, but only while recording is enabled.
        if self.record:
            self.files_written.add(path)

    def newer(self, source, target):
        """Tell if the source is newer than the target.

        Returns true if 'source' exists and is more recently modified than
        'target', or if 'source' exists and 'target' doesn't.

        Returns false if both exist and 'target' is the same age or younger
        than 'source'. Raise DistlibException if 'source' does not exist.

        Note that this test is not very accurate: files created in the same
        second will have the same "age".
        """
        if not os.path.exists(source):
            raise DistlibException("file '%r' does not exist" %
                                   os.path.abspath(source))
        if not os.path.exists(target):
            return True

        return os.stat(source).st_mtime > os.stat(target).st_mtime

    def copy_file(self, infile, outfile, check=True):
        """Copy a file respecting dry-run and force flags.
        """
        self.ensure_dir(os.path.dirname(outfile))
        logger.info('Copying %s to %s', infile, outfile)
        if not self.dry_run:
            msg = None
            if check:
                # Refuse to clobber symlinks or non-regular files.
                if os.path.islink(outfile):
                    msg = '%s is a symlink' % outfile
                elif os.path.exists(outfile) and not os.path.isfile(outfile):
                    msg = '%s is a non-regular file' % outfile
                if msg:
                    raise ValueError(msg + ' which would be overwritten')
            shutil.copyfile(infile, outfile)
        self.record_as_written(outfile)

    def copy_stream(self, instream, outfile, encoding=None):
        """Copy the contents of a stream to outfile.

        If encoding is given, instream is assumed to yield text which is
        encoded on the way out; otherwise a binary copy is performed.
        """
        assert not os.path.isdir(outfile)
        self.ensure_dir(os.path.dirname(outfile))
        logger.info('Copying stream %s to %s', instream, outfile)
        if not self.dry_run:
            if encoding is None:
                outstream = open(outfile, 'wb')
            else:
                outstream = codecs.open(outfile, 'w', encoding=encoding)
            try:
                shutil.copyfileobj(instream, outstream)
            finally:
                outstream.close()
        self.record_as_written(outfile)

    def write_binary_file(self, path, data):
        """Write bytes to path, replacing any existing file."""
        self.ensure_dir(os.path.dirname(path))
        if not self.dry_run:
            if os.path.exists(path):
                os.remove(path)
            with open(path, 'wb') as f:
                f.write(data)
        self.record_as_written(path)

    def write_text_file(self, path, data, encoding):
        """Write text to path using the given encoding."""
        self.write_binary_file(path, data.encode(encoding))

    def set_mode(self, bits, mask, files):
        """Set mode bits on files: new mode is (current | bits) & mask.

        Only applies on POSIX (or Jython-on-POSIX) systems.
        """
        if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'):
            # Set the executable bits (owner, group, and world) on
            # all the files specified.
            for f in files:
                if self.dry_run:
                    logger.info("changing mode of %s", f)
                else:
                    mode = (os.stat(f).st_mode | bits) & mask
                    logger.info("changing mode of %s to %o", f, mode)
                    os.chmod(f, mode)

    # Convenience: mark files read+execute for everyone (r-xr-xr-x floor).
    set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f)

    def ensure_dir(self, path):
        """Create path (and any missing parents), recording it if enabled."""
        path = os.path.abspath(path)
        if path not in self.ensured and not os.path.exists(path):
            self.ensured.add(path)
            d, f = os.path.split(path)
            # Recurse to create parents first.
            self.ensure_dir(d)
            logger.info('Creating %s' % path)
            if not self.dry_run:
                os.mkdir(path)
            if self.record:
                self.dirs_created.add(path)

    def byte_compile(self,
                     path,
                     optimize=False,
                     force=False,
                     prefix=None,
                     hashed_invalidation=False):
        """Byte-compile path, returning the cache (.pyc) path.

        If force is false, compilation is skipped when the cached file is
        already up to date. prefix, if given, is stripped from the path
        used in error diagnostics. hashed_invalidation selects hash-based
        pyc invalidation where supported (Python 3.7+).
        """
        dpath = cache_from_source(path, not optimize)
        logger.info('Byte-compiling %s to %s', path, dpath)
        if not self.dry_run:
            if force or self.newer(path, dpath):
                if not prefix:
                    diagpath = None
                else:
                    assert path.startswith(prefix)
                    diagpath = path[len(prefix):]
                compile_kwargs = {}
                if hashed_invalidation and hasattr(py_compile,
                                                   'PycInvalidationMode'):
                    compile_kwargs[
                        'invalidation_mode'] = py_compile.PycInvalidationMode.CHECKED_HASH
                py_compile.compile(path, dpath, diagpath, True,
                                   **compile_kwargs)  # raise error
            self.record_as_written(dpath)
        return dpath

    def ensure_removed(self, path):
        """Remove path (file, link or directory tree) if it exists,
        updating the recorded sets accordingly."""
        if os.path.exists(path):
            if os.path.isdir(path) and not os.path.islink(path):
                logger.debug('Removing directory tree at %s', path)
                if not self.dry_run:
                    shutil.rmtree(path)
                if self.record:
                    if path in self.dirs_created:
                        self.dirs_created.remove(path)
            else:
                if os.path.islink(path):
                    s = 'link'
                else:
                    s = 'file'
                logger.debug('Removing %s %s', s, path)
                if not self.dry_run:
                    os.remove(path)
                if self.record:
                    if path in self.files_written:
                        self.files_written.remove(path)

    def is_writable(self, path):
        """Return True if path (or its nearest existing ancestor) is
        writable by the current user."""
        result = False
        while not result:
            if os.path.exists(path):
                result = os.access(path, os.W_OK)
                break
            parent = os.path.dirname(path)
            # Reached the filesystem root without finding anything.
            if parent == path:
                break
            path = parent
        return result

    def commit(self):
        """
        Commit recorded changes, turn off recording, return
        changes.
        """
        assert self.record
        result = self.files_written, self.dirs_created
        self._init_record()
        return result

    def rollback(self):
        """Undo recorded changes: remove written files and created
        directories, then reset recording state."""
        if not self.dry_run:
            for f in list(self.files_written):
                if os.path.exists(f):
                    os.remove(f)
            # dirs should all be empty now, except perhaps for
            # __pycache__ subdirs
            # reverse so that subdirs appear before their parents
            dirs = sorted(self.dirs_created, reverse=True)
            for d in dirs:
                flist = os.listdir(d)
                if flist:
                    assert flist == ['__pycache__']
                    sd = os.path.join(d, flist[0])
                    os.rmdir(sd)
                os.rmdir(d)  # should fail if non-empty
        self._init_record()

689 

690 

def resolve(module_name, dotted_path):
    """Import module_name and return the object named by dotted_path
    relative to it, or the module itself if dotted_path is None.

    :param module_name: a (possibly dotted) module name.
    :param dotted_path: a dotted attribute path within the module, or None.
    :raises ImportError: if the module cannot be imported.
    :raises AttributeError: if any component of dotted_path is missing.
    """
    if module_name not in sys.modules:
        __import__(module_name)
    # Look the module up in sys.modules rather than using the return value
    # of __import__: for a dotted name, __import__('a.b') returns the
    # *top-level* package 'a', so attribute resolution would previously be
    # performed on the wrong module whenever the submodule was not already
    # cached in sys.modules.
    mod = sys.modules[module_name]
    if dotted_path is None:
        result = mod
    else:
        parts = dotted_path.split('.')
        result = getattr(mod, parts.pop(0))
        for p in parts:
            result = getattr(result, p)
    return result

704 

705 

class ExportEntry(object):
    """A single export/entry-point: ``name = prefix:suffix [flags]``."""

    def __init__(self, name, prefix, suffix, flags):
        self.name = name        # the exported name
        self.prefix = prefix    # dotted module path
        self.suffix = suffix    # attribute path within the module, or None
        self.flags = flags      # list of flag strings

    @cached_property
    def value(self):
        # Resolved lazily on first access; cached_property caches the result.
        return resolve(self.prefix, self.suffix)

    def __repr__(self):  # pragma: no cover
        return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix,
                                                self.suffix, self.flags)

    def __eq__(self, other):
        if not isinstance(other, ExportEntry):
            return False
        return (self.name == other.name and self.prefix == other.prefix
                and self.suffix == other.suffix
                and self.flags == other.flags)

    __hash__ = object.__hash__

732 

733 

# Matches an export specification 'name = dotted.path[:attr] [flag, ...]'.
# Named groups: 'name', 'callable' (the dotted/colon-separated path) and
# the optional comma-separated 'flags'.
ENTRY_RE = re.compile(
    r'''(?P<name>([^\[]\S*))
    \s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
    \s*(\[\s*(?P<flags>[\w-]+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
    ''', re.VERBOSE)

739 

740 

def get_export_entry(specification):
    """Parse 'name = prefix:suffix [flags]' and return an ExportEntry.

    Returns None when the specification doesn't look like an export at
    all; raises DistlibException when it is recognisably malformed (stray
    brackets, more than one ':' in the callable part).
    """
    m = ENTRY_RE.search(specification)
    if not m:
        # Stray brackets indicate a malformed (rather than absent) export.
        if '[' in specification or ']' in specification:
            raise DistlibException("Invalid specification "
                                   "'%s'" % specification)
        return None
    d = m.groupdict()
    path = d['callable']
    colons = path.count(':')
    if colons == 0:
        prefix, suffix = path, None
    elif colons == 1:
        prefix, suffix = path.split(':')
    else:
        raise DistlibException("Invalid specification "
                               "'%s'" % specification)
    flags = d['flags']
    if flags is None:
        if '[' in specification or ']' in specification:
            raise DistlibException("Invalid specification "
                                   "'%s'" % specification)
        flags = []
    else:
        flags = [f.strip() for f in flags.split(',')]
    return ExportEntry(d['name'], prefix, suffix, flags)

770 

771 

def get_cache_base(suffix=None):
    """
    Return the default base location for distlib caches. If the directory does
    not exist, it is created. Use the suffix provided for the base directory,
    and default to '.distlib' if it isn't provided.

    On Windows, if LOCALAPPDATA is defined in the environment, then it is
    assumed to be a directory, and will be the parent directory of the result.
    On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home
    directory - using os.expanduser('~') - will be the parent directory of
    the result.

    The result is just the directory '.distlib' in the parent directory as
    determined above, or with the name specified with ``suffix``.

    If the chosen parent is unusable (not a writable directory and cannot
    be created), a fresh temporary directory is used instead.
    """
    if suffix is None:
        suffix = '.distlib'
    if os.name == 'nt' and 'LOCALAPPDATA' in os.environ:
        result = os.path.expandvars('$localappdata')
    else:
        # Assume posix, or old Windows
        result = os.path.expanduser('~')
    # we use 'isdir' instead of 'exists', because we want to
    # fail if there's a file with that name
    if os.path.isdir(result):
        usable = os.access(result, os.W_OK)
        if not usable:
            logger.warning('Directory exists but is not writable: %s', result)
    else:
        try:
            os.makedirs(result)
            usable = True
        except OSError:
            logger.warning('Unable to create %s', result, exc_info=True)
            usable = False
    if not usable:
        # Fall back to a throwaway temporary directory.
        result = tempfile.mkdtemp()
        logger.warning('Default location unusable, using %s', result)
    return os.path.join(result, suffix)

811 

812 

def path_to_cache_dir(path):
    """
    Convert an absolute path to a directory name for use in a cache.

    The algorithm used is:

    #. On Windows, any ``':'`` in the drive is replaced with ``'---'``.
    #. Any occurrence of ``os.sep`` is replaced with ``'--'``.
    #. ``'.cache'`` is appended.
    """
    drive, tail = os.path.splitdrive(os.path.abspath(path))
    if drive:
        drive = drive.replace(':', '---')
    return drive + tail.replace(os.sep, '--') + '.cache'

828 

829 

def ensure_slash(s):
    """Return s with a trailing '/' appended if it lacks one."""
    return s if s.endswith('/') else s + '/'

834 

835 

def parse_credentials(netloc):
    """Split a netloc like 'user:password@host' into its parts.

    Returns (username, password, host); missing pieces come back as None,
    and username/password are URL-unquoted.
    """
    username = password = None
    if '@' in netloc:
        creds, netloc = netloc.rsplit('@', 1)
        if ':' in creds:
            username, password = creds.split(':', 1)
        else:
            username = creds
        if username:
            username = unquote(username)
        if password:
            password = unquote(password)
    return username, password, netloc

849 

850 

def get_process_umask():
    """Return the current process umask without permanently changing it."""
    # os.umask both sets a new mask and returns the old one, so set a
    # throwaway value and immediately restore the original.
    current = os.umask(0o22)
    os.umask(current)
    return current

855 

856 

def is_string_sequence(seq):
    """Return True if every element of seq is a string.

    seq must be non-empty; an empty sequence trips the assertion.
    """
    index = None
    ok = True
    for index, item in enumerate(seq):
        if not isinstance(item, string_types):
            ok = False
            break
    assert index is not None
    return ok

866 

867 

# Matches '<name>-<version>' in a download filename: the name is made of
# dot/dash-separated alphanumeric words, the version is a loose token.
PROJECT_NAME_AND_VERSION = re.compile(
    '([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-'
    '([a-z0-9_.+-]+)', re.I)
# Matches a trailing '-pyX[.Y]' Python-version tag in a filename.
PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)')

872 

873 

def split_filename(filename, project_name=None):
    """
    Extract name, version, python version from a filename (no extension)

    Return name, version, pyver or None
    """
    pyver = None
    filename = unquote(filename).replace(' ', '-')
    # Strip a trailing '-pyX.Y' tag first, remembering the version.
    m = PYTHON_VERSION.search(filename)
    if m:
        pyver = m.group(1)
        filename = filename[:m.start()]
    # If the caller told us the project name, try splitting on it directly.
    if project_name and len(filename) > len(project_name) + 1:
        m = re.match(re.escape(project_name) + r'\b', filename)
        if m:
            n = m.end()
            return filename[:n], filename[n + 1:], pyver
    # Otherwise fall back to the generic name-version pattern.
    m = PROJECT_NAME_AND_VERSION.match(filename)
    if m:
        return m.group(1), m.group(3), pyver
    return None

897 

898 

# Allow spaces in name because of legacy dists like "Twisted Core"
# Matches 'name (version)' as found in e.g. Provides-Dist metadata values.
NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*'
                             r'\(\s*(?P<ver>[^\s)]+)\)$')

902 

903 

def parse_name_and_version(p):
    """
    A utility method used to get name and version from a string.

    From e.g. a Provides-Dist value.

    :param p: A value in a form 'foo (1.0)'
    :return: The name and version as a tuple.
    :raises DistlibException: if p doesn't match the expected form.
    """
    m = NAME_VERSION_RE.match(p)
    if m is None:
        raise DistlibException('Ill-formed name/version string: \'%s\'' % p)
    groups = m.groupdict()
    return groups['name'].strip().lower(), groups['ver']

918 

919 

def get_extras(requested, available):
    """Resolve a set of requested extras against those actually available.

    '*' selects every available extra; '-name' removes one from the result;
    requesting an undeclared extra logs a warning but is still honoured.
    Either argument may be None.
    """
    result = set()
    requested = set(requested or [])
    available = set(available or [])
    if '*' in requested:
        requested.remove('*')
        result |= available
    for item in requested:
        if item == '-':
            # A literal '-' is treated as an ordinary extra name.
            result.add(item)
        elif item.startswith('-'):
            unwanted = item[1:]
            if unwanted not in available:
                logger.warning('undeclared extra: %s' % unwanted)
            result.discard(unwanted)
        else:
            if item not in available:
                logger.warning('undeclared extra: %s' % item)
            result.add(item)
    return result

941 

942 

943# 

944# Extended metadata functionality 

945# 

946 

947 

def _get_external_data(url):
    """Fetch and decode JSON metadata from url.

    Returns an empty dict on any failure (network error, non-JSON
    content type, bad JSON); failures are logged, not raised.
    """
    result = {}
    try:
        # urlopen might fail if it runs into redirections,
        # because of Python issue #13696. Fixed in locators
        # using a custom redirect handler.
        resp = urlopen(url)
        headers = resp.info()
        ct = headers.get('Content-Type')
        if not ct.startswith('application/json'):
            logger.debug('Unexpected response for JSON request: %s', ct)
        else:
            # Decode the byte stream as UTF-8 on the fly while parsing.
            reader = codecs.getreader('utf-8')(resp)
            result = json.load(reader)
    except Exception as e:
        logger.exception('Failed to get external data for %s: %s', url, e)
    return result

967 

968 

# Base URL of the red-dove.com mirror of PyPI metadata, used by
# get_project_data() and get_package_data() below.
_external_data_base_url = 'https://www.red-dove.com/pypi/projects/'

970 

971 

def get_project_data(name):
    """Fetch the external JSON metadata for a project; {} on failure."""
    # Projects are bucketed under the uppercased first letter of their name.
    relative = '%s/%s/project.json' % (name[0].upper(), name)
    return _get_external_data(urljoin(_external_data_base_url, relative))

977 

978 

def get_package_data(name, version):
    """Fetch the external JSON metadata for one release; {} on failure."""
    relative = '%s/%s/package-%s.json' % (name[0].upper(), name, version)
    return _get_external_data(urljoin(_external_data_base_url, relative))

983 

984 

class Cache(object):
    """
    A class implementing a cache for resources that need to live in the file system
    e.g. shared libraries. This class was moved from resources to here because it
    could be used by other modules, e.g. the wheel module.
    """

    def __init__(self, base):
        """
        Initialise an instance.

        :param base: The base directory where the cache should be located.
        """
        # we use 'isdir' instead of 'exists', because we want to
        # fail if there's a file with that name
        if not os.path.isdir(base):  # pragma: no cover
            os.makedirs(base)
        # Warn when group/other permission bits are set: the cache may hold
        # user-specific files, so it should be private to the user.
        if (os.stat(base).st_mode & 0o77) != 0:
            logger.warning('Directory \'%s\' is not private', base)
        self.base = os.path.abspath(os.path.normpath(base))

    def prefix_to_dir(self, prefix):
        """
        Converts a resource prefix to a directory name in the cache.
        """
        return path_to_cache_dir(prefix)

    def clear(self):
        """
        Clear the cache.

        :return: A list of entries which could not be removed.
        """
        not_removed = []
        for fn in os.listdir(self.base):
            fn = os.path.join(self.base, fn)
            try:
                if os.path.islink(fn) or os.path.isfile(fn):
                    os.remove(fn)
                elif os.path.isdir(fn):
                    shutil.rmtree(fn)
            except Exception:
                # Best-effort: record what couldn't be deleted and move on.
                not_removed.append(fn)
        return not_removed

1027 

1028 

class EventMixin(object):
    """
    A very simple publish/subscribe system.
    """

    def __init__(self):
        self._subscribers = {}

    def add(self, event, subscriber, append=True):
        """
        Add a subscriber for an event.

        :param event: The name of an event.
        :param subscriber: The subscriber to be added (and called when the
                           event is published).
        :param append: Whether to append or prepend the subscriber to an
                       existing subscriber list for the event.
        """
        subscribers = self._subscribers
        try:
            queue = subscribers[event]
        except KeyError:
            subscribers[event] = deque([subscriber])
        else:
            if append:
                queue.append(subscriber)
            else:
                queue.appendleft(subscriber)

    def remove(self, event, subscriber):
        """
        Remove a subscriber for an event.

        :param event: The name of an event.
        :param subscriber: The subscriber to be removed.
        :raises ValueError: if the event has no subscribers at all.
        """
        subscribers = self._subscribers
        if event not in subscribers:
            raise ValueError('No subscribers: %r' % event)
        subscribers[event].remove(subscriber)

    def get_subscribers(self, event):
        """
        Return an iterator for the subscribers for an event.
        :param event: The event to return subscribers for.
        """
        return iter(self._subscribers.get(event, ()))

    def publish(self, event, *args, **kwargs):
        """
        Publish a event and return a list of values returned by its
        subscribers.

        Exceptions raised by a subscriber are logged and yield None in
        the result list rather than propagating.

        :param event: The event to publish.
        :param args: The positional arguments to pass to the event's
                     subscribers.
        :param kwargs: The keyword arguments to pass to the event's
                       subscribers.
        """
        result = []
        for subscriber in self.get_subscribers(event):
            try:
                value = subscriber(event, *args, **kwargs)
            except Exception:
                logger.exception('Exception during event publication')
                value = None
            result.append(value)
        logger.debug('publish %s: args = %s, kwargs = %s, result = %s', event,
                     args, kwargs, result)
        return result

1098 

1099 

1100# 

1101# Simple sequencing 

1102# 

class Sequencer(object):
    """
    Maintain a set of ordering constraints (pred -> succ edges) between
    named steps and compute the sequence of steps needed to reach a
    given final step.
    """

    def __init__(self):
        self._preds = {}  # maps a step to the set of its predecessors
        self._succs = {}  # maps a step to the set of its successors
        self._nodes = set()  # nodes with no preds/succs

    def add_node(self, node):
        # Register a standalone step which (so far) has no edges.
        self._nodes.add(node)

    def remove_node(self, node, edges=False):
        """
        Remove a standalone step; if ``edges`` is true, also remove every
        edge involving it and prune any now-empty edge sets.
        """
        if node in self._nodes:
            self._nodes.remove(node)
        if edges:
            for p in set(self._preds.get(node, ())):
                self.remove(p, node)
            for s in set(self._succs.get(node, ())):
                self.remove(node, s)
            # Remove empties
            for k, v in list(self._preds.items()):
                if not v:
                    del self._preds[k]
            for k, v in list(self._succs.items()):
                if not v:
                    del self._succs[k]

    def add(self, pred, succ):
        # Record that 'pred' must be carried out before 'succ'.
        assert pred != succ
        self._preds.setdefault(succ, set()).add(pred)
        self._succs.setdefault(pred, set()).add(succ)

    def remove(self, pred, succ):
        # Delete the pred -> succ edge; ValueError if it doesn't exist.
        assert pred != succ
        try:
            preds = self._preds[succ]
            succs = self._succs[pred]
        except KeyError:  # pragma: no cover
            raise ValueError('%r not a successor of anything' % succ)
        try:
            preds.remove(pred)
            succs.remove(succ)
        except KeyError:  # pragma: no cover
            raise ValueError('%r not a successor of %r' % (succ, pred))

    def is_step(self, step):
        # A known step participates in an edge or was added explicitly.
        return (step in self._preds or step in self._succs
                or step in self._nodes)

    def get_steps(self, final):
        """
        Return an iterator over the steps needed to reach ``final``,
        predecessors first (the internal list is built backwards from
        ``final`` and reversed on return).
        """
        if not self.is_step(final):
            raise ValueError('Unknown: %r' % final)
        result = []
        todo = []
        seen = set()
        todo.append(final)
        while todo:
            step = todo.pop(0)
            if step in seen:
                # if a step was already seen,
                # move it to the end (so it will appear earlier
                # when reversed on return) ... but not for the
                # final step, as that would be confusing for
                # users
                if step != final:
                    result.remove(step)
                    result.append(step)
            else:
                seen.add(step)
                result.append(step)
                preds = self._preds.get(step, ())
                todo.extend(preds)
        return reversed(result)

    @property
    def strong_connections(self):
        # http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
        # Returns a list of tuples, each one a strongly connected component
        # of the successor graph.
        index_counter = [0]
        stack = []
        lowlinks = {}
        index = {}
        result = []

        graph = self._succs

        def strongconnect(node):
            # set the depth index for this node to the smallest unused index
            index[node] = index_counter[0]
            lowlinks[node] = index_counter[0]
            index_counter[0] += 1
            stack.append(node)

            # Consider successors
            try:
                successors = graph[node]
            except Exception:
                successors = []
            for successor in successors:
                if successor not in lowlinks:
                    # Successor has not yet been visited
                    strongconnect(successor)
                    lowlinks[node] = min(lowlinks[node], lowlinks[successor])
                elif successor in stack:
                    # the successor is in the stack and hence in the current
                    # strongly connected component (SCC)
                    lowlinks[node] = min(lowlinks[node], index[successor])

            # If `node` is a root node, pop the stack and generate an SCC
            if lowlinks[node] == index[node]:
                connected_component = []

                while True:
                    successor = stack.pop()
                    connected_component.append(successor)
                    if successor == node:
                        break
                component = tuple(connected_component)
                # storing the result
                result.append(component)

        for node in graph:
            if node not in lowlinks:
                strongconnect(node)

        return result

    @property
    def dot(self):
        # Render the constraint graph in Graphviz DOT format.
        result = ['digraph G {']
        for succ in self._preds:
            preds = self._preds[succ]
            for pred in preds:
                result.append(' %s -> %s;' % (pred, succ))
        for node in self._nodes:
            result.append(' %s;' % node)
        result.append('}')
        return '\n'.join(result)

1239 

1240 

1241# 

1242# Unarchiving functionality for zip, tar, tgz, tbz, whl 

1243# 

1244 

# Archive filename extensions recognised by unarchive() when inferring the
# archive format.
ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz',
                      '.whl')

1247 

1248 

def unarchive(archive_filename, dest_dir, format=None, check=True):
    """
    Unpack an archive (zip, wheel, tar, tar.gz or tar.bz2) into a directory.

    :param archive_filename: The path of the archive to unpack.
    :param dest_dir: The directory to unpack into.
    :param format: One of 'zip', 'tgz', 'tbz' or 'tar'. If None, it is
                   inferred from the filename extension.
    :param check: If true, reject member paths which would resolve outside
                  ``dest_dir`` (guards against "zip slip" path traversal).
    :raises ValueError: If the format is unknown, or a member path lies
                        outside the destination directory.
    """

    def check_path(path):
        # Normalise member names to text before joining/comparing.
        if isinstance(path, bytes):
            path = path.decode('utf-8')
        p = os.path.abspath(os.path.join(dest_dir, path))
        # A member may resolve to dest_dir itself (e.g. '.'); that's fine.
        # Otherwise it must lie strictly inside dest_dir: a prefix match
        # alone isn't enough ('/tmp/foo' vs '/tmp/foobar'), so require a
        # separator right after the prefix. Testing p != dest_dir first also
        # avoids the IndexError the previous code hit on p[plen] when a
        # member resolved exactly to dest_dir.
        if p != dest_dir and (not p.startswith(dest_dir)
                              or p[plen] != os.sep):
            raise ValueError('path outside destination: %r' % p)

    dest_dir = os.path.abspath(dest_dir)
    plen = len(dest_dir)
    archive = None
    # tarfile open modes by format. Also consulted when the caller passes
    # format explicitly: previously 'mode' was assigned only during format
    # inference, so e.g. unarchive(fn, d, format='tgz') raised NameError.
    tar_modes = {'tgz': 'r:gz', 'tbz': 'r:bz2', 'tar': 'r'}
    if format is None:
        if archive_filename.endswith(('.zip', '.whl')):
            format = 'zip'
        elif archive_filename.endswith(('.tar.gz', '.tgz')):
            format = 'tgz'
        elif archive_filename.endswith(('.tar.bz2', '.tbz')):
            format = 'tbz'
        elif archive_filename.endswith('.tar'):
            format = 'tar'
        else:  # pragma: no cover
            raise ValueError('Unknown format for %r' % archive_filename)
    try:
        if format == 'zip':
            archive = ZipFile(archive_filename, 'r')
            if check:
                names = archive.namelist()
                for name in names:
                    check_path(name)
        else:
            if format not in tar_modes:  # pragma: no cover
                raise ValueError('Unknown format: %r' % format)
            archive = tarfile.open(archive_filename, tar_modes[format])
            if check:
                names = archive.getnames()
                for name in names:
                    check_path(name)
        if format != 'zip' and sys.version_info[0] < 3:
            # See Python issue 17153. If the dest path contains Unicode,
            # tarfile extraction fails on Python 2.x if a member path name
            # contains non-ASCII characters - it leads to an implicit
            # bytes -> unicode conversion using ASCII to decode.
            for tarinfo in archive.getmembers():
                if isinstance(tarinfo.name, bytes):
                    tarinfo.name = tarinfo.name.decode('utf-8')

        # Limit extraction of dangerous items, if this Python
        # allows it easily. If not, just trust the input.
        # See: https://docs.python.org/3/library/tarfile.html#extraction-filters
        def extraction_filter(member, path):
            """Run tarfile.tar_filter, but raise the expected ValueError"""
            # This is only called if the current Python has tarfile filters
            try:
                return tarfile.tar_filter(member, path)
            except tarfile.FilterError as exc:
                raise ValueError(str(exc))

        archive.extraction_filter = extraction_filter

        archive.extractall(dest_dir)

    finally:
        if archive:
            archive.close()

1315 

1316 

def zip_dir(directory):
    """zip a directory tree into a BytesIO object"""
    buf = io.BytesIO()
    prefix_len = len(directory)
    with ZipFile(buf, "w") as zf:
        for root, dirs, files in os.walk(directory):
            # Archive names are paths relative to the given directory.
            arc_root = root[prefix_len:]
            for fname in files:
                zf.write(os.path.join(root, fname),
                         os.path.join(arc_root, fname))
    return buf

1329 

1330 

1331# 

1332# Simple progress bar 

1333# 

1334 

UNITS = ('', 'K', 'M', 'G', 'T', 'P')


class Progress(object):
    """
    Track the progress of a long-running operation between a minimum value
    and an (optionally unknown) maximum value, and render it in various
    textual forms.
    """
    unknown = 'UNKNOWN'

    def __init__(self, minval=0, maxval=100):
        # maxval may be None to indicate an unknown/unbounded maximum.
        assert maxval is None or maxval >= minval
        self.min = self.cur = minval
        self.max = maxval
        self.started = None
        self.elapsed = 0
        self.done = False

    def update(self, curval):
        """Record a new current value and refresh the elapsed time."""
        assert self.min <= curval
        assert self.max is None or curval <= self.max
        self.cur = curval
        now = time.time()
        if self.started is None:
            # The first update marks the start time.
            self.started = now
        else:
            self.elapsed = now - self.started

    def increment(self, incr):
        """Advance the current value by a non-negative amount."""
        assert incr >= 0
        self.update(self.cur + incr)

    def start(self):
        """Mark the start of the operation; returns self for chaining."""
        self.update(self.min)
        return self

    def stop(self):
        """Mark the operation as finished."""
        if self.max is not None:
            self.update(self.max)
        self.done = True

    @property
    def maximum(self):
        """The maximum value, or 'UNKNOWN' when unbounded."""
        return self.unknown if self.max is None else self.max

    @property
    def percentage(self):
        """The progress rendered as a percentage string, e.g. ' 42 %'."""
        if self.done:
            return '100 %'
        if self.max is None:
            return ' ?? %'
        v = 100.0 * (self.cur - self.min) / (self.max - self.min)
        return '%3d %%' % v

    def format_duration(self, duration):
        """Render a duration as HH:MM:SS, or '??:??:??' if indeterminate."""
        if (duration <= 0 and self.max is None) or self.cur == self.min:
            return '??:??:??'
        return time.strftime('%H:%M:%S', time.gmtime(duration))

    @property
    def ETA(self):
        """An 'ETA : HH:MM:SS' or 'Done: HH:MM:SS' string."""
        if self.done:
            prefix = 'Done'
            t = self.elapsed
        else:
            prefix = 'ETA '
            if self.max is None:
                t = -1
            elif self.elapsed == 0 or (self.cur == self.min):
                t = 0
            else:
                # Extrapolate the remaining time from progress so far.
                t = (float(self.max - self.min) /
                     (self.cur - self.min) - 1) * self.elapsed
        return '%s: %s' % (prefix, self.format_duration(t))

    @property
    def speed(self):
        """Throughput rendered as a human-readable 'N [KMGTP]B/s' string."""
        rate = 0.0 if self.elapsed == 0 else (self.cur - self.min) / self.elapsed
        for unit in UNITS:
            if rate < 1000:
                break
            rate /= 1000.0
        return '%d %sB/s' % (rate, unit)

1426 

1427 

1428# 

1429# Glob functionality 

1430# 

1431 

RICH_GLOB = re.compile(r'\{([^}]*)\}')
_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]')
_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$')


def iglob(path_glob):
    """Extended globbing function that supports ** and {opt1,opt2,opt3}."""
    # '**' may only appear as a whole path component.
    if _CHECK_RECURSIVE_GLOB.search(path_glob):
        msg = """invalid glob %r: recursive glob "**" must be used alone"""
        raise ValueError(msg % path_glob)
    # Braces must be balanced.
    if _CHECK_MISMATCH_SET.search(path_glob):
        msg = """invalid glob %r: mismatching set marker '{' or '}'"""
        raise ValueError(msg % path_glob)
    return _iglob(path_glob)


def _iglob(path_glob):
    # Expand the first {a,b,c} set (if any), then recurse on each choice.
    parts = RICH_GLOB.split(path_glob, 1)
    if len(parts) > 1:
        assert len(parts) == 3, parts
        prefix, choices, suffix = parts
        for choice in choices.split(','):
            for path in _iglob(''.join((prefix, choice, suffix))):
                yield path
    elif '**' not in path_glob:
        # Plain pattern - defer to the standard library.
        for item in std_iglob(path_glob):
            yield item
    else:
        prefix, radical = path_glob.split('**', 1)
        if prefix == '':
            prefix = '.'
        if radical == '':
            radical = '*'
        else:
            # we support both path separators after '**'
            radical = radical.lstrip('/')
            radical = radical.lstrip('\\')
        for dirpath, _subdirs, _files in os.walk(prefix):
            dirpath = os.path.normpath(dirpath)
            for fn in _iglob(os.path.join(dirpath, radical)):
                yield fn

1474 

1475 

if ssl:
    from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname,
                         CertificateError)

    #
    # HTTPSConnection which verifies certificates/matches domains
    #

    class HTTPSConnection(httplib.HTTPSConnection):
        ca_certs = None  # set this to the path to the certs file (.pem)
        check_domain = True  # only used if ca_certs is not None

        # noinspection PyPropertyAccess
        def connect(self):
            # Open the TCP connection (and any CONNECT tunnel) first, then
            # wrap the socket in TLS, verifying the peer against ca_certs
            # when one is configured.
            sock = socket.create_connection((self.host, self.port),
                                            self.timeout)
            if getattr(self, '_tunnel_host', False):
                self.sock = sock
                self._tunnel()

            # NOTE(review): PROTOCOL_SSLv23 negotiates the highest mutually
            # supported protocol; it is deprecated in modern Python in
            # favour of PROTOCOL_TLS_CLIENT.
            context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
            if hasattr(ssl, 'OP_NO_SSLv2'):
                context.options |= ssl.OP_NO_SSLv2
            if getattr(self, 'cert_file', None):
                context.load_cert_chain(self.cert_file, self.key_file)
            kwargs = {}
            if self.ca_certs:
                context.verify_mode = ssl.CERT_REQUIRED
                context.load_verify_locations(cafile=self.ca_certs)
                if getattr(ssl, 'HAS_SNI', False):
                    # Pass the hostname for SNI where supported.
                    kwargs['server_hostname'] = self.host

            self.sock = context.wrap_socket(sock, **kwargs)
            if self.ca_certs and self.check_domain:
                try:
                    match_hostname(self.sock.getpeercert(), self.host)
                    logger.debug('Host verified: %s', self.host)
                except CertificateError:  # pragma: no cover
                    # Tear the connection down before propagating the error.
                    self.sock.shutdown(socket.SHUT_RDWR)
                    self.sock.close()
                    raise

    class HTTPSHandler(BaseHTTPSHandler):

        def __init__(self, ca_certs, check_domain=True):
            BaseHTTPSHandler.__init__(self)
            self.ca_certs = ca_certs
            self.check_domain = check_domain

        def _conn_maker(self, *args, **kwargs):
            """
            This is called to create a connection instance. Normally you'd
            pass a connection class to do_open, but it doesn't actually check for
            a class, and just expects a callable. As long as we behave just as a
            constructor would have, we should be OK. If it ever changes so that
            we *must* pass a class, we'll create an UnsafeHTTPSConnection class
            which just sets check_domain to False in the class definition, and
            choose which one to pass to do_open.
            """
            result = HTTPSConnection(*args, **kwargs)
            if self.ca_certs:
                result.ca_certs = self.ca_certs
                result.check_domain = self.check_domain
            return result

        def https_open(self, req):
            try:
                return self.do_open(self._conn_maker, req)
            except URLError as e:
                # Surface verification failures as a clearer error type.
                if 'certificate verify failed' in str(e.reason):
                    raise CertificateError(
                        'Unable to verify server certificate '
                        'for %s' % req.host)
                else:
                    raise

    #
    # To prevent against mixing HTTP traffic with HTTPS (examples: A Man-In-The-
    # Middle proxy using HTTP listens on port 443, or an index mistakenly serves
    # HTML containing a http://xyz link when it should be https://xyz),
    # you can use the following handler class, which does not allow HTTP traffic.
    #
    # It works by inheriting from HTTPHandler - so build_opener won't add a
    # handler for HTTP itself.
    #
    class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler):

        def http_open(self, req):
            raise URLError(
                'Unexpected HTTP request on what should be a secure '
                'connection: %s' % req)

1567 

1568 

1569# 

1570# XML-RPC with timeouts 

1571# 

class Transport(xmlrpclib.Transport):
    """
    An XML-RPC HTTP transport whose connections honour a timeout.

    Installed by the module-level ServerProxy wrapper when a 'timeout'
    keyword is supplied.
    """

    def __init__(self, timeout, use_datetime=0):
        self.timeout = timeout
        xmlrpclib.Transport.__init__(self, use_datetime)

    def make_connection(self, host):
        """Return a (possibly cached) HTTPConnection for *host*."""
        h, eh, x509 = self.get_host_info(host)
        if not self._connection or host != self._connection[0]:
            self._extra_headers = eh
            # Pass the timeout through to the connection; previously it was
            # stored but never applied, so HTTP XML-RPC requests could
            # block indefinitely despite this class existing specifically
            # to provide timeouts (SafeTransport already passes it).
            self._connection = host, httplib.HTTPConnection(
                h, timeout=self.timeout)
        return self._connection[1]

1584 

1585 

if ssl:

    class SafeTransport(xmlrpclib.SafeTransport):
        """An XML-RPC HTTPS transport whose connections honour a timeout."""

        def __init__(self, timeout, use_datetime=0):
            self.timeout = timeout
            xmlrpclib.SafeTransport.__init__(self, use_datetime)

        def make_connection(self, host):
            """Return a (possibly cached) HTTPSConnection for *host*."""
            conn_host, extra_headers, kwargs = self.get_host_info(host)
            if not kwargs:
                kwargs = {}
            kwargs['timeout'] = self.timeout
            cached = self._connection and host == self._connection[0]
            if not cached:
                self._extra_headers = extra_headers
                self._connection = host, httplib.HTTPSConnection(
                    conn_host, None, **kwargs)
            return self._connection[1]

1604 

1605 

class ServerProxy(xmlrpclib.ServerProxy):
    """An XML-RPC server proxy honouring an optional ``timeout`` keyword."""

    def __init__(self, uri, **kwargs):
        self.timeout = timeout = kwargs.pop('timeout', None)
        # The timeout-aware transport classes only come into play if a
        # timeout is specified.
        if timeout is not None:
            # splittype() is deprecated as of Python 3.8, so use urlparse.
            scheme = urlparse(uri)[0]
            use_datetime = kwargs.get('use_datetime', 0)
            transport_cls = SafeTransport if scheme == 'https' else Transport
            t = transport_cls(timeout, use_datetime=use_datetime)
            kwargs['transport'] = t
            self.transport = t
        xmlrpclib.ServerProxy.__init__(self, uri, **kwargs)

1623 

1624 

1625# 

1626# CSV functionality. This is provided because on 2.x, the csv module can't 

1627# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files. 

1628# 

1629 

1630 

1631def _csv_open(fn, mode, **kwargs): 

1632 if sys.version_info[0] < 3: 

1633 mode += 'b' 

1634 else: 

1635 kwargs['newline'] = '' 

1636 # Python 3 determines encoding from locale. Force 'utf-8' 

1637 # file encoding to match other forced utf-8 encoding 

1638 kwargs['encoding'] = 'utf-8' 

1639 return open(fn, mode, **kwargs) 

1640 

1641 

class CSVBase(object):
    """
    Shared CSV dialect options and context-manager support for the
    reader/writer classes below.
    """
    # Native strs are used because the csv API on 2.x cannot take Unicode.
    defaults = dict(delimiter=str(','), quotechar=str('"'),
                    lineterminator=str('\n'))

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        self.stream.close()

1654 

1655 

class CSVReader(CSVBase):
    """Iterate over rows of a CSV stream or file path as lists of text."""

    def __init__(self, **kwargs):
        if 'stream' in kwargs:
            stream = kwargs['stream']
            if sys.version_info[0] >= 3:
                # needs to be a text stream
                stream = codecs.getreader('utf-8')(stream)
            self.stream = stream
        else:
            self.stream = _csv_open(kwargs['path'], 'r')
        self.reader = csv.reader(self.stream, **self.defaults)

    def __iter__(self):
        return self

    def next(self):
        row = next(self.reader)
        if sys.version_info[0] < 3:
            # Decode byte cells to text on 2.x.
            row = [cell if isinstance(cell, text_type)
                   else cell.decode('utf-8') for cell in row]
        return row

    __next__ = next

1681 

1682 

class CSVWriter(CSVBase):
    """Write rows to a CSV file, encoding text cells to UTF-8 on 2.x."""

    def __init__(self, fn, **kwargs):
        self.stream = _csv_open(fn, 'w')
        self.writer = csv.writer(self.stream, **self.defaults)

    def writerow(self, row):
        if sys.version_info[0] < 3:
            # The 2.x csv module needs encoded bytes, not Unicode.
            row = [cell.encode('utf-8') if isinstance(cell, text_type)
                   else cell for cell in row]
        self.writer.writerow(row)

1698 

1699 

1700# 

1701# Configurator functionality 

1702# 

1703 

1704 

class Configurator(BaseConfigurator):
    """A configurator which adds an 'inc://' (include) value converter."""

    value_converters = dict(BaseConfigurator.value_converters)
    value_converters['inc'] = 'inc_convert'

    def __init__(self, config, base=None):
        super(Configurator, self).__init__(config)
        # Base directory for resolving relative inc:// paths.
        self.base = base or os.getcwd()

    def configure_custom(self, config):
        """Instantiate an object described by a dict with a '()' factory key."""

        def convert(obj):
            # Recursively convert containers, instantiate nested '()' dicts
            # and defer scalars to the base class converter.
            if isinstance(obj, (list, tuple)):
                return type(obj)([convert(item) for item in obj])
            if isinstance(obj, dict):
                if '()' in obj:
                    return self.configure_custom(obj)
                return dict((key, convert(value))
                            for key, value in obj.items())
            return self.convert(obj)

        factory = config.pop('()')
        if not callable(factory):
            factory = self.resolve(factory)
        props = config.pop('.', None)
        # Check for valid identifiers
        args = config.pop('[]', ())
        if args:
            args = tuple([convert(arg) for arg in args])
        kwargs = dict((key, convert(config[key])) for key in config
                      if valid_ident(key))
        result = factory(*args, **kwargs)
        if props:
            for name, value in props.items():
                setattr(result, name, convert(value))
        return result

    def __getitem__(self, key):
        value = self.config[key]
        if isinstance(value, dict) and '()' in value:
            # Instantiate lazily and cache the constructed object.
            value = self.configure_custom(value)
            self.config[key] = value
        return value

    def inc_convert(self, value):
        """Default converter for the inc:// protocol."""
        path = value if os.path.isabs(value) else os.path.join(self.base, value)
        with codecs.open(path, 'r', encoding='utf-8') as f:
            return json.load(f)

1759 

1760 

class SubprocessMixin(object):
    """
    Mixin for running subprocesses and capturing their output
    """

    def __init__(self, verbose=False, progress=None):
        self.verbose = verbose
        self.progress = progress

    def reader(self, stream, context):
        """
        Read lines from a subprocess' output stream and either pass to a progress
        callable (if specified) or write progress information to sys.stderr.
        """
        progress = self.progress
        verbose = self.verbose
        while True:
            line = stream.readline()
            if not line:
                break
            if progress is not None:
                progress(line, context)
            elif verbose:
                sys.stderr.write(line.decode('utf-8'))
                sys.stderr.flush()
            else:
                # Minimal progress indication when not verbose.
                sys.stderr.write('.')
                sys.stderr.flush()
        stream.close()

    def run_command(self, cmd, **kwargs):
        """Run *cmd*, draining stdout/stderr on reader threads; returns the
        completed Popen object."""
        p = subprocess.Popen(cmd,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             **kwargs)
        drainers = [
            threading.Thread(target=self.reader, args=(p.stdout, 'stdout')),
            threading.Thread(target=self.reader, args=(p.stderr, 'stderr')),
        ]
        for t in drainers:
            t.start()
        p.wait()
        for t in drainers:
            t.join()
        if self.progress is not None:
            self.progress('done.', 'main')
        elif self.verbose:
            sys.stderr.write('done.\n')
        return p

1808 

1809 

def normalize_name(name):
    """Normalize a python package name a la PEP 503"""
    # https://www.python.org/dev/peps/pep-0503/#normalized-names
    return re.sub('[-_.]+', '-', name.lower())

1814 

1815 

1816# def _get_pypirc_command(): 

1817# """ 

1818# Get the distutils command for interacting with PyPI configurations. 

1819# :return: the command. 

1820# """ 

1821# from distutils.core import Distribution 

1822# from distutils.config import PyPIRCCommand 

1823# d = Distribution() 

1824# return PyPIRCCommand(d) 

1825 

1826 

class PyPIRCFile(object):
    """
    Read and update a distutils-style .pypirc configuration file.
    """

    DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/'
    DEFAULT_REALM = 'pypi'

    def __init__(self, fn=None, url=None):
        """
        Initialise an instance.

        :param fn: The configuration file path; defaults to ~/.pypirc.
        :param url: The repository URL to select configuration for.
        """
        if fn is None:
            fn = os.path.join(os.path.expanduser('~'), '.pypirc')
        self.filename = fn
        self.url = url

    def read(self):
        """
        Return the access configuration matching the selected repository as
        a dict (empty if no matching configuration exists).
        """
        result = {}

        if os.path.exists(self.filename):
            repository = self.url or self.DEFAULT_REPOSITORY

            config = configparser.RawConfigParser()
            config.read(self.filename)
            sections = config.sections()
            if 'distutils' in sections:
                # let's get the list of servers
                index_servers = config.get('distutils', 'index-servers')
                _servers = [
                    server.strip() for server in index_servers.split('\n')
                    if server.strip() != ''
                ]
                if _servers == []:
                    # nothing set, let's try to get the default pypi
                    if 'pypi' in sections:
                        _servers = ['pypi']
                else:
                    # NOTE(review): each iteration rebinds 'result', so the
                    # last server which is not discarded effectively wins.
                    for server in _servers:
                        result = {'server': server}
                        result['username'] = config.get(server, 'username')

                        # optional params
                        for key, default in (('repository',
                                              self.DEFAULT_REPOSITORY),
                                             ('realm', self.DEFAULT_REALM),
                                             ('password', None)):
                            if config.has_option(server, key):
                                result[key] = config.get(server, key)
                            else:
                                result[key] = default

                        # work around people having "repository" for the "pypi"
                        # section of their config set to the HTTP (rather than
                        # HTTPS) URL
                        if (server == 'pypi' and repository
                                in (self.DEFAULT_REPOSITORY, 'pypi')):
                            result['repository'] = self.DEFAULT_REPOSITORY
                        elif (result['server'] != repository
                              and result['repository'] != repository):
                            # Not the repository we were asked about.
                            result = {}
            elif 'server-login' in sections:
                # old format
                server = 'server-login'
                if config.has_option(server, 'repository'):
                    repository = config.get(server, 'repository')
                else:
                    repository = self.DEFAULT_REPOSITORY
                result = {
                    'username': config.get(server, 'username'),
                    'password': config.get(server, 'password'),
                    'repository': repository,
                    'server': server,
                    'realm': self.DEFAULT_REALM
                }
        return result

    def update(self, username, password):
        """
        Store *username* and *password* in the [pypi] section of the file,
        creating the section if necessary.
        """
        config = configparser.RawConfigParser()
        fn = self.filename
        config.read(fn)
        if not config.has_section('pypi'):
            config.add_section('pypi')
        config.set('pypi', 'username', username)
        config.set('pypi', 'password', password)
        with open(fn, 'w') as f:
            config.write(f)

1909 

1910 

def _load_pypirc(index):
    """
    Read the PyPI access configuration as supported by distutils.

    :param index: An object with a ``url`` attribute identifying the index.
    :return: A dict of access settings (possibly empty).
    """
    pypirc = PyPIRCFile(url=index.url)
    return pypirc.read()

1916 

1917 

def _store_pypirc(index):
    """Persist the index's username/password to the user's .pypirc file."""
    pypirc = PyPIRCFile()
    pypirc.update(index.username, index.password)

1920 

1921 

1922# 

1923# get_platform()/get_host_platform() copied from Python 3.10.a0 source, with some minor 

1924# tweaks 

1925# 

1926 

1927 

def get_host_platform():
    """Return a string that identifies the current platform. This is used mainly to
    distinguish platform-specific build directories and platform-specific built
    distributions. Typically includes the OS name and version and the
    architecture (as supplied by 'os.uname()'), although the exact information
    included depends on the OS; eg. on Linux, the kernel version isn't
    particularly important.

    Examples of returned values:
        linux-i586
        linux-alpha (?)
        solaris-2.6-sun4u

    Windows will return one of:
        win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
        win32 (all others - specifically, sys.platform is returned)

    For other non-POSIX platforms, currently just returns 'sys.platform'.

    """
    if os.name == 'nt':
        # On Windows the architecture is encoded in the interpreter's
        # version string rather than reported by os.uname().
        if 'amd64' in sys.version.lower():
            return 'win-amd64'
        if '(arm)' in sys.version.lower():
            return 'win-arm32'
        if '(arm64)' in sys.version.lower():
            return 'win-arm64'
        return sys.platform

    # Set for cross builds explicitly
    if "_PYTHON_HOST_PLATFORM" in os.environ:
        return os.environ["_PYTHON_HOST_PLATFORM"]

    if os.name != 'posix' or not hasattr(os, 'uname'):
        # XXX what about the architecture? NT is Intel or Alpha,
        # Mac OS is M68k or PPC, etc.
        return sys.platform

    # Try to distinguish various flavours of Unix

    (osname, host, release, version, machine) = os.uname()

    # Convert the OS name to lowercase, remove '/' characters, and translate
    # spaces (for "Power Macintosh")
    osname = osname.lower().replace('/', '')
    machine = machine.replace(' ', '_').replace('/', '-')

    if osname[:5] == 'linux':
        # At least on Linux/Intel, 'machine' is the processor --
        # i386, etc.
        # XXX what about Alpha, SPARC, etc?
        return "%s-%s" % (osname, machine)

    elif osname[:5] == 'sunos':
        if release[0] >= '5':  # SunOS 5 == Solaris 2
            osname = 'solaris'
            release = '%d.%s' % (int(release[0]) - 3, release[2:])
            # We can't use 'platform.architecture()[0]' because a
            # bootstrap problem. We use a dict to get an error
            # if some suspicious happens.
            bitness = {2147483647: '32bit', 9223372036854775807: '64bit'}
            machine += '.%s' % bitness[sys.maxsize]
        # fall through to standard osname-release-machine representation
    elif osname[:3] == 'aix':
        from _aix_support import aix_platform
        return aix_platform()
    elif osname[:6] == 'cygwin':
        osname = 'cygwin'
        # Keep only the leading numeric portion of the release string.
        rel_re = re.compile(r'[\d.]+', re.ASCII)
        m = rel_re.match(release)
        if m:
            release = m.group()
    elif osname[:6] == 'darwin':
        import _osx_support
        try:
            from distutils import sysconfig
        except ImportError:
            import sysconfig
        # NOTE(review): _osx_support derives the macOS platform triple from
        # the interpreter's build configuration variables.
        osname, release, machine = _osx_support.get_platform_osx(
            sysconfig.get_config_vars(), osname, release, machine)

    return '%s-%s-%s' % (osname, release, machine)

2010 

2011 

# Map the VSCMD_ARG_TGT_ARCH cross-compilation target (as set by the Visual
# Studio developer environment) to the corresponding platform tag.
_TARGET_TO_PLAT = {
    'x86': 'win32',
    'x64': 'win-amd64',
    'arm': 'win-arm32',
}

2017 

2018 

def get_platform():
    """Return the platform being targeted, honouring Windows
    cross-compilation via the VSCMD_ARG_TGT_ARCH environment variable."""
    if os.name != 'nt':
        return get_host_platform()
    target = os.environ.get('VSCMD_ARG_TGT_ARCH')
    try:
        return _TARGET_TO_PLAT[target]
    except KeyError:
        # Unknown or unset target - fall back to the host platform.
        return get_host_platform()