Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/distlib/util.py: 20%

Shortcuts on this page

r m x   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1273 statements  

1# 

2# Copyright (C) 2012-2023 The Python Software Foundation. 

3# See LICENSE.txt and CONTRIBUTORS.txt. 

4# 

5import codecs 

6from collections import deque 

7import contextlib 

8import csv 

9from glob import iglob as std_iglob 

10import io 

11import json 

12import logging 

13import os 

14import py_compile 

15import re 

16import socket 

17try: 

18 import ssl 

19except ImportError: # pragma: no cover 

20 ssl = None 

21import subprocess 

22import sys 

23import tarfile 

24import tempfile 

25import textwrap 

26 

27try: 

28 import threading 

29except ImportError: # pragma: no cover 

30 import dummy_threading as threading 

31import time 

32 

33from . import DistlibException 

34from .compat import (string_types, text_type, shutil, raw_input, StringIO, cache_from_source, urlopen, urljoin, httplib, 

35 xmlrpclib, HTTPHandler, BaseConfigurator, valid_ident, Container, configparser, URLError, ZipFile, 

36 fsdecode, unquote, urlparse) 

37 

logger = logging.getLogger(__name__)

#
# Requirement parsing code as per PEP 508
#

# A distribution or extra name: word characters, dots and hyphens.
IDENTIFIER = re.compile(r'^([\w\.-]+)\s*')
# Like IDENTIFIER, but also allows '*' and '+' (wildcards, local versions).
VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*')
# Version comparison operators: <, <=, >, >=, ==, ===, ~=, !=.
COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*')
# Marker operators: the comparison operators plus 'in' / 'not in'.
MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*')
OR = re.compile(r'^or\b\s*')
AND = re.compile(r'^and\b\s*')
# A run of non-whitespace (used e.g. to grab a URI token).
NON_SPACE = re.compile(r'(\S+)\s*')
# Characters permitted inside a quoted string literal in a marker.
STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)')

52 

53 

def parse_marker(marker_string):
    """
    Parse a marker string and return a dictionary containing a marker expression.

    The dictionary will contain keys "op", "lhs" and "rhs" for non-terminals in
    the expression grammar, or strings. A string contained in quotes is to be
    interpreted as a literal string, and a string not contained in quotes is a
    variable (such as os_name).
    """
    # Recursive-descent parser. Each helper consumes its production from the
    # front of `remaining` and returns (parsed_value, rest_of_string).
    # Grammar (lowest to highest precedence): marker -> marker_and ('or' ...);
    # marker_and -> marker_expr ('and' ...); marker_expr -> '(' marker ')' |
    # var (op var)*.

    def marker_var(remaining):
        # either identifier, or literal string
        m = IDENTIFIER.match(remaining)
        if m:
            result = m.groups()[0]
            remaining = remaining[m.end():]
        elif not remaining:
            raise SyntaxError('unexpected end of input')
        else:
            q = remaining[0]
            if q not in '\'"':
                raise SyntaxError('invalid expression: %s' % remaining)
            # oq is the "other" quote character, allowed unescaped inside the
            # literal (e.g. a double quote inside a single-quoted string).
            oq = '\'"'.replace(q, '')
            remaining = remaining[1:]
            parts = [q]
            while remaining:
                # either a string chunk, or oq, or q to terminate
                if remaining[0] == q:
                    break
                elif remaining[0] == oq:
                    parts.append(oq)
                    remaining = remaining[1:]
                else:
                    m = STRING_CHUNK.match(remaining)
                    if not m:
                        raise SyntaxError('error in string literal: %s' % remaining)
                    parts.append(m.groups()[0])
                    remaining = remaining[m.end():]
            else:
                # while-else: input ran out before the closing quote was seen.
                s = ''.join(parts)
                raise SyntaxError('unterminated string: %s' % s)
            parts.append(q)
            result = ''.join(parts)
            remaining = remaining[1:].lstrip()  # skip past closing quote
        return result, remaining

    def marker_expr(remaining):
        # Parenthesised sub-expression, or a chain of var (op var) comparisons.
        if remaining and remaining[0] == '(':
            result, remaining = marker(remaining[1:].lstrip())
            if remaining[0] != ')':
                raise SyntaxError('unterminated parenthesis: %s' % remaining)
            remaining = remaining[1:].lstrip()
        else:
            lhs, remaining = marker_var(remaining)
            while remaining:
                m = MARKER_OP.match(remaining)
                if not m:
                    break
                op = m.groups()[0]
                remaining = remaining[m.end():]
                rhs, remaining = marker_var(remaining)
                # Left-associative: fold each comparison into the lhs.
                lhs = {'op': op, 'lhs': lhs, 'rhs': rhs}
            result = lhs
        return result, remaining

    def marker_and(remaining):
        # One or more marker_exprs joined by 'and'.
        lhs, remaining = marker_expr(remaining)
        while remaining:
            m = AND.match(remaining)
            if not m:
                break
            remaining = remaining[m.end():]
            rhs, remaining = marker_expr(remaining)
            lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs}
        return lhs, remaining

    def marker(remaining):
        # One or more marker_ands joined by 'or' ('or' binds loosest).
        lhs, remaining = marker_and(remaining)
        while remaining:
            m = OR.match(remaining)
            if not m:
                break
            remaining = remaining[m.end():]
            rhs, remaining = marker_and(remaining)
            lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs}
        return lhs, remaining

    return marker(marker_string)

142 

143 

def parse_requirement(req):
    """
    Parse a requirement passed in as a string. Return a Container
    whose attributes contain the various parts of the requirement.

    The Container has attributes: name, extras (list or None),
    constraints (list of (op, version) tuples or None), marker
    (parsed marker expression or None), url (str or None) and
    requirement (the normalised 'name op version, ...' string).
    Returns None for blank lines and comment lines.
    """
    remaining = req.strip()
    if not remaining or remaining.startswith('#'):
        return None
    m = IDENTIFIER.match(remaining)
    if not m:
        raise SyntaxError('name expected: %s' % remaining)
    distname = m.groups()[0]
    remaining = remaining[m.end():]
    extras = mark_expr = versions = uri = None
    # Optional '[extra1, extra2]' section immediately after the name.
    if remaining and remaining[0] == '[':
        i = remaining.find(']', 1)
        if i < 0:
            raise SyntaxError('unterminated extra: %s' % remaining)
        s = remaining[1:i]
        remaining = remaining[i + 1:].lstrip()
        extras = []
        while s:
            m = IDENTIFIER.match(s)
            if not m:
                raise SyntaxError('malformed extra: %s' % s)
            extras.append(m.groups()[0])
            s = s[m.end():]
            if not s:
                break
            if s[0] != ',':
                raise SyntaxError('comma expected in extras: %s' % s)
            s = s[1:].lstrip()
        if not extras:
            extras = None
    if remaining:
        if remaining[0] == '@':
            # it's a URI
            remaining = remaining[1:].lstrip()
            m = NON_SPACE.match(remaining)
            if not m:
                raise SyntaxError('invalid URI: %s' % remaining)
            uri = m.groups()[0]
            t = urlparse(uri)
            # there are issues with Python and URL parsing, so this test
            # is a bit crude. See bpo-20271, bpo-23505. Python doesn't
            # always parse invalid URLs correctly - it should raise
            # exceptions for malformed URLs
            if not (t.scheme and t.netloc):
                raise SyntaxError('Invalid URL: %s' % uri)
            remaining = remaining[m.end():].lstrip()
        else:

            def get_versions(ver_remaining):
                """
                Return a list of operator, version tuples if any are
                specified, else None.
                """
                m = COMPARE_OP.match(ver_remaining)
                versions = None
                if m:
                    versions = []
                    while True:
                        op = m.groups()[0]
                        ver_remaining = ver_remaining[m.end():]
                        m = VERSION_IDENTIFIER.match(ver_remaining)
                        if not m:
                            raise SyntaxError('invalid version: %s' % ver_remaining)
                        v = m.groups()[0]
                        versions.append((op, v))
                        ver_remaining = ver_remaining[m.end():]
                        if not ver_remaining or ver_remaining[0] != ',':
                            break
                        ver_remaining = ver_remaining[1:].lstrip()
                        # Some packages have a trailing comma which would break things
                        # See issue #148
                        if not ver_remaining:
                            break
                        m = COMPARE_OP.match(ver_remaining)
                        if not m:
                            raise SyntaxError('invalid constraint: %s' % ver_remaining)
                    if not versions:
                        versions = None
                return versions, ver_remaining

            if remaining[0] != '(':
                versions, remaining = get_versions(remaining)
            else:
                i = remaining.find(')', 1)
                if i < 0:
                    raise SyntaxError('unterminated parenthesis: %s' % remaining)
                s = remaining[1:i]
                remaining = remaining[i + 1:].lstrip()
                # As a special diversion from PEP 508, allow a version number
                # a.b.c in parentheses as a synonym for ~= a.b.c (because this
                # is allowed in earlier PEPs)
                if COMPARE_OP.match(s):
                    versions, _ = get_versions(s)
                else:
                    m = VERSION_IDENTIFIER.match(s)
                    if not m:
                        raise SyntaxError('invalid constraint: %s' % s)
                    v = m.groups()[0]
                    s = s[m.end():].lstrip()
                    if s:
                        raise SyntaxError('invalid constraint: %s' % s)
                    versions = [('~=', v)]

    # Optional environment marker after ';'.
    if remaining:
        if remaining[0] != ';':
            raise SyntaxError('invalid requirement: %s' % remaining)
        remaining = remaining[1:].lstrip()

        mark_expr, remaining = parse_marker(remaining)

    # Anything left must be a trailing comment.
    if remaining and remaining[0] != '#':
        raise SyntaxError('unexpected trailing data: %s' % remaining)

    if not versions:
        rs = distname
    else:
        rs = '%s %s' % (distname, ', '.join(['%s %s' % con for con in versions]))
    return Container(name=distname, extras=extras, constraints=versions, marker=mark_expr, url=uri, requirement=rs)

266 

267 

def get_resources_dests(resources_root, rules):
    """Find destinations for resources files"""

    def _rel_path(root, path):
        # normalizes and returns a lstripped-/-separated path
        root = root.replace(os.path.sep, '/')
        path = path.replace(os.path.sep, '/')
        assert path.startswith(root)
        return path[len(root):].lstrip('/')

    destinations = {}
    for base, suffix, dest in rules:
        # Expand the base pattern first, then the suffix pattern under each
        # matching base directory.
        for abs_base in iglob(os.path.join(resources_root, base)):
            for abs_path in iglob(os.path.join(abs_base, suffix)):
                resource_file = _rel_path(resources_root, abs_path)
                if dest is None:
                    # A None destination removes any earlier mapping.
                    destinations.pop(resource_file, None)
                    continue
                rel_path = _rel_path(abs_base, abs_path)
                rel_dest = dest.replace(os.path.sep, '/').rstrip('/')
                destinations[resource_file] = rel_dest + '/' + rel_path
    return destinations

292 

293 

def in_venv():
    """Return True if running inside a virtual environment.

    Detects both virtualenv-created environments (sys.real_prefix) and
    PEP 405 venvs (sys.base_prefix differing from sys.prefix).
    """
    if hasattr(sys, 'real_prefix'):
        # virtualenv venvs
        return True
    # PEP 405 venvs
    return getattr(sys, 'base_prefix', sys.prefix) != sys.prefix

302 

303 

def get_executable():
    """Return the path of the running Python interpreter as text.

    The result is deliberately not normcased (see issue #143). The old
    __PYVENV_LAUNCHER__ workaround for OS X is no longer needed, as the
    stub launcher now makes sys.executable point at the stub.
    """
    exe = sys.executable
    return exe if isinstance(exe, text_type) else fsdecode(exe)

320 

321 

def proceed(prompt, allowed_chars, error_prompt=None, default=None):
    """Prompt until the reply starts with one of *allowed_chars*.

    An empty reply is replaced by *default* (if given). After an invalid
    reply, *error_prompt* (if given) is shown before re-prompting. Returns
    the lowercased first character of the accepted reply.
    """
    current = prompt
    while True:
        reply = raw_input(current)
        current = prompt
        if not reply and default:
            reply = default
        if reply:
            c = reply[0].lower()
            if c in allowed_chars:
                return c
            if error_prompt:
                current = '%c: %s\n%s' % (c, error_prompt, prompt)

336 

337 

def extract_by_key(d, keys):
    """Return a new dict with only the entries of *d* whose key is in *keys*.

    *keys* may be an iterable of keys or a whitespace-separated string.
    Missing keys are silently skipped.
    """
    if isinstance(keys, string_types):
        keys = keys.split()
    return {key: d[key] for key in keys if key in d}

346 

347 

def read_exports(stream):
    """
    Read an exports mapping {group: {name: ExportEntry}} from *stream*.

    The data is first parsed as JSON (extended 'pydist' metadata with an
    'extensions' -> 'python.exports' -> 'exports' structure); failing that,
    it is parsed as legacy INI-style text.
    """
    if sys.version_info[0] >= 3:
        # needs to be a text stream
        stream = codecs.getreader('utf-8')(stream)
    # Try to load as JSON, falling back on legacy format
    data = stream.read()
    stream = StringIO(data)
    try:
        jdata = json.load(stream)
        result = jdata['extensions']['python.exports']['exports']
        # Convert each 'name = value' pair into an ExportEntry in place.
        for group, entries in result.items():
            for k, v in entries.items():
                s = '%s = %s' % (k, v)
                entry = get_export_entry(s)
                assert entry is not None
                entries[k] = entry
        return result
    except Exception:
        # Not JSON (or not in the expected shape): rewind and parse as INI.
        stream.seek(0, 0)

    def read_stream(cp, stream):
        # read_file is the modern spelling; readfp the Python 2 fallback.
        if hasattr(cp, 'read_file'):
            cp.read_file(stream)
        else:
            cp.readfp(stream)

    cp = configparser.ConfigParser()
    try:
        read_stream(cp, stream)
    except configparser.MissingSectionHeaderError:
        # Possibly indented legacy data: dedent it and retry once.
        stream.close()
        data = textwrap.dedent(data)
        stream = StringIO(data)
        read_stream(cp, stream)

    result = {}
    for key in cp.sections():
        result[key] = entries = {}
        for name, value in cp.items(key):
            s = '%s = %s' % (name, value)
            entry = get_export_entry(s)
            assert entry is not None
            # entry.dist = self
            entries[name] = entry
    return result

393 

394 

def write_exports(exports, stream):
    """Write an exports mapping {group: {name: entry}} to *stream* as INI text.

    Each entry contributes a 'name = prefix[:suffix] [flags]' line in its
    group's section. On Python 3 the (byte) stream is wrapped in a UTF-8
    writer, since ConfigParser writes text.
    """
    if sys.version_info[0] >= 3:
        # needs to be a text stream
        stream = codecs.getwriter('utf-8')(stream)
    cp = configparser.ConfigParser()
    for section, entries in exports.items():
        # TODO check section, entries for valid values
        cp.add_section(section)
        for entry in entries.values():
            if entry.suffix is None:
                value = entry.prefix
            else:
                value = '%s:%s' % (entry.prefix, entry.suffix)
            if entry.flags:
                value = '%s [%s]' % (value, ', '.join(entry.flags))
            cp.set(section, entry.name, value)
    cp.write(stream)

412 

413 

@contextlib.contextmanager
def tempdir():
    """Context manager yielding a fresh temporary directory.

    The directory and its contents are removed on exit, even on error.
    """
    path = tempfile.mkdtemp()
    try:
        yield path
    finally:
        shutil.rmtree(path)

421 

422 

@contextlib.contextmanager
def chdir(d):
    """Context manager that changes into directory *d*.

    The original working directory is restored on exit, even on error.
    """
    original = os.getcwd()
    try:
        os.chdir(d)
        yield
    finally:
        os.chdir(original)

431 

432 

@contextlib.contextmanager
def socket_timeout(seconds=15):
    """Context manager temporarily setting the default socket timeout.

    The previous default timeout is restored on exit, even on error.
    """
    previous = socket.getdefaulttimeout()
    try:
        socket.setdefaulttimeout(seconds)
        yield
    finally:
        socket.setdefaulttimeout(previous)

441 

442 

class cached_property(object):
    """Non-data descriptor caching the result of a method's first call.

    On first attribute access the wrapped function is invoked and its result
    stored on the instance under the same name, so subsequent accesses hit
    the instance attribute directly and bypass this descriptor.
    """

    def __init__(self, func):
        # The function whose result is to be cached.
        self.func = func

    def __get__(self, obj, cls=None):
        # Accessed on the class itself: return the descriptor.
        if obj is None:
            return self
        result = self.func(obj)
        # Shadow the descriptor with the computed value on the instance.
        object.__setattr__(obj, self.func.__name__, result)
        return result

457 

458 

def convert_path(pathname):
    """Return 'pathname' as a name that will work on the native filesystem.

    The path is split on '/' and put back together again using the current
    directory separator. Needed because filenames in the setup script are
    always supplied in Unix style, and have to be converted to the local
    convention before we can actually use them in the filesystem. Raises
    ValueError on non-Unix-ish systems if 'pathname' either starts or
    ends with a slash.
    """
    # On POSIX (or for an empty path) there is nothing to convert.
    if os.sep == '/' or not pathname:
        return pathname
    if pathname.startswith('/'):
        raise ValueError("path '%s' cannot be absolute" % pathname)
    if pathname.endswith('/'):
        raise ValueError("path '%s' cannot end with '/'" % pathname)

    # Drop any '.' components before re-joining with the native separator.
    components = [c for c in pathname.split('/') if c != os.curdir]
    if not components:
        return os.curdir
    return os.path.join(*components)

484 

485 

class FileOperator(object):
    """
    Perform filesystem operations (copy, write, byte-compile, remove) with
    support for a dry-run mode and optional recording of files written and
    directories created, so that a batch of changes can be committed or
    rolled back.
    """

    def __init__(self, dry_run=False):
        # When dry_run is True, operations are logged but not performed.
        self.dry_run = dry_run
        # Directories already checked/created by ensure_dir.
        self.ensured = set()
        self._init_record()

    def _init_record(self):
        # Reset recording state; self.record is switched on externally.
        self.record = False
        self.files_written = set()
        self.dirs_created = set()

    def record_as_written(self, path):
        # Remember *path* as written, but only while recording is enabled.
        if self.record:
            self.files_written.add(path)

    def newer(self, source, target):
        """Tell if the target is newer than the source.

        Returns true if 'source' exists and is more recently modified than
        'target', or if 'source' exists and 'target' doesn't.

        Returns false if both exist and 'target' is the same age or younger
        than 'source'. Raise PackagingFileError if 'source' does not exist.

        Note that this test is not very accurate: files created in the same
        second will have the same "age".
        """
        if not os.path.exists(source):
            raise DistlibException("file '%r' does not exist" % os.path.abspath(source))
        if not os.path.exists(target):
            return True

        return os.stat(source).st_mtime > os.stat(target).st_mtime

    def copy_file(self, infile, outfile, check=True):
        """Copy a file respecting dry-run and force flags.
        """
        self.ensure_dir(os.path.dirname(outfile))
        logger.info('Copying %s to %s', infile, outfile)
        if not self.dry_run:
            msg = None
            if check:
                # Refuse to clobber symlinks or non-regular files.
                if os.path.islink(outfile):
                    msg = '%s is a symlink' % outfile
                elif os.path.exists(outfile) and not os.path.isfile(outfile):
                    msg = '%s is a non-regular file' % outfile
            if msg:
                raise ValueError(msg + ' which would be overwritten')
            shutil.copyfile(infile, outfile)
        self.record_as_written(outfile)

    def copy_stream(self, instream, outfile, encoding=None):
        # Copy the contents of *instream* to the file at *outfile*. If
        # *encoding* is given, the output is opened in text mode with that
        # encoding; otherwise it is written as binary.
        assert not os.path.isdir(outfile)
        self.ensure_dir(os.path.dirname(outfile))
        logger.info('Copying stream %s to %s', instream, outfile)
        if not self.dry_run:
            if encoding is None:
                outstream = open(outfile, 'wb')
            else:
                outstream = codecs.open(outfile, 'w', encoding=encoding)
            try:
                shutil.copyfileobj(instream, outstream)
            finally:
                outstream.close()
        self.record_as_written(outfile)

    def write_binary_file(self, path, data):
        # Write *data* (bytes) to *path*, replacing any existing file.
        self.ensure_dir(os.path.dirname(path))
        if not self.dry_run:
            if os.path.exists(path):
                os.remove(path)
            with open(path, 'wb') as f:
                f.write(data)
        self.record_as_written(path)

    def write_text_file(self, path, data, encoding):
        # Write *data* (text) to *path*, encoded with *encoding*.
        self.write_binary_file(path, data.encode(encoding))

    def set_mode(self, bits, mask, files):
        # On POSIX (or Jython-on-POSIX), set mode (st_mode | bits) & mask on
        # each of *files*. A no-op on other platforms.
        if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'):
            # Set the executable bits (owner, group, and world) on
            # all the files specified.
            for f in files:
                if self.dry_run:
                    logger.info("changing mode of %s", f)
                else:
                    mode = (os.stat(f).st_mode | bits) & mask
                    logger.info("changing mode of %s to %o", f, mode)
                    os.chmod(f, mode)

    # Convenience: mark files read/execute for everyone (within 0o7777).
    set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f)

    def ensure_dir(self, path):
        # Create *path* (and any missing parents, recursively), honouring
        # dry-run and recording created directories.
        path = os.path.abspath(path)
        if path not in self.ensured and not os.path.exists(path):
            self.ensured.add(path)
            d, f = os.path.split(path)
            self.ensure_dir(d)
            logger.info('Creating %s' % path)
            if not self.dry_run:
                os.mkdir(path)
            if self.record:
                self.dirs_created.add(path)

    def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_invalidation=False):
        # Byte-compile *path* to its cache location and return that location.
        # Only recompiles if *force* or the source is newer than the cache.
        # *prefix*, if given, is stripped from the path used in diagnostics.
        # *hashed_invalidation* selects PEP 552 hash-based pyc invalidation
        # where py_compile supports it (Python 3.7+).
        dpath = cache_from_source(path, not optimize)
        logger.info('Byte-compiling %s to %s', path, dpath)
        if not self.dry_run:
            if force or self.newer(path, dpath):
                if not prefix:
                    diagpath = None
                else:
                    assert path.startswith(prefix)
                    diagpath = path[len(prefix):]
                compile_kwargs = {}
                if hashed_invalidation and hasattr(py_compile, 'PycInvalidationMode'):
                    if not isinstance(hashed_invalidation, py_compile.PycInvalidationMode):
                        hashed_invalidation = py_compile.PycInvalidationMode.CHECKED_HASH
                    compile_kwargs['invalidation_mode'] = hashed_invalidation
                py_compile.compile(path, dpath, diagpath, True, **compile_kwargs)  # raise error
            self.record_as_written(dpath)
        return dpath

    def ensure_removed(self, path):
        # Remove *path* if it exists: directory trees via rmtree, links and
        # files via os.remove; recorded entries are forgotten accordingly.
        if os.path.exists(path):
            if os.path.isdir(path) and not os.path.islink(path):
                logger.debug('Removing directory tree at %s', path)
                if not self.dry_run:
                    shutil.rmtree(path)
                if self.record:
                    if path in self.dirs_created:
                        self.dirs_created.remove(path)
            else:
                if os.path.islink(path):
                    s = 'link'
                else:
                    s = 'file'
                logger.debug('Removing %s %s', s, path)
                if not self.dry_run:
                    os.remove(path)
                if self.record:
                    if path in self.files_written:
                        self.files_written.remove(path)

    def is_writable(self, path):
        # True if *path* (or its nearest existing ancestor) is writable.
        result = False
        while not result:
            if os.path.exists(path):
                result = os.access(path, os.W_OK)
                break
            parent = os.path.dirname(path)
            if parent == path:
                # Reached the filesystem root without finding anything.
                break
            path = parent
        return result

    def commit(self):
        """
        Commit recorded changes, turn off recording, return
        changes.
        """
        assert self.record
        result = self.files_written, self.dirs_created
        self._init_record()
        return result

    def rollback(self):
        # Undo recorded changes: delete written files, then remove created
        # directories (deepest first). A no-op in dry-run mode.
        if not self.dry_run:
            for f in list(self.files_written):
                if os.path.exists(f):
                    os.remove(f)
            # dirs should all be empty now, except perhaps for
            # __pycache__ subdirs
            # reverse so that subdirs appear before their parents
            dirs = sorted(self.dirs_created, reverse=True)
            for d in dirs:
                flist = os.listdir(d)
                if flist:
                    assert flist == ['__pycache__']
                    sd = os.path.join(d, flist[0])
                    os.rmdir(sd)
                os.rmdir(d)  # should fail if non-empty
        self._init_record()

670 

671 

def resolve(module_name, dotted_path):
    """Import *module_name* and resolve *dotted_path* within it.

    :param module_name: A (possibly dotted) module name, e.g. 'os.path'.
    :param dotted_path: A dotted attribute path within the module, or None
                        to return the module itself.
    :return: The resolved object.
    :raises ImportError: If the module cannot be imported.
    :raises AttributeError: If the attribute path cannot be resolved.
    """
    if module_name in sys.modules:
        mod = sys.modules[module_name]
    else:
        __import__(module_name)
        # __import__ returns the top-level package for a dotted name
        # (e.g. 'a' for 'a.b'), so fetch the actual submodule from
        # sys.modules to make attribute resolution start in the right place.
        mod = sys.modules[module_name]
    if dotted_path is None:
        result = mod
    else:
        parts = dotted_path.split('.')
        result = getattr(mod, parts.pop(0))
        for p in parts:
            result = getattr(result, p)
    return result

685 

686 

class ExportEntry(object):
    """A single export (entry-point) of the form 'name = prefix:suffix [flags]'."""

    def __init__(self, name, prefix, suffix, flags):
        self.name = name      # the export's name
        self.prefix = prefix  # dotted module path
        self.suffix = suffix  # attribute path within the module, or None
        self.flags = flags    # list of flag strings

    @cached_property
    def value(self):
        # The object this entry refers to, imported lazily and cached.
        return resolve(self.prefix, self.suffix)

    def __repr__(self):  # pragma: no cover
        return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix, self.suffix, self.flags)

    def __eq__(self, other):
        if not isinstance(other, ExportEntry):
            return False
        return (self.name == other.name and self.prefix == other.prefix and
                self.suffix == other.suffix and self.flags == other.flags)

    __hash__ = object.__hash__

711 

712 

# Matches an export/entry-point specification of the form
#   name = dotted.module[:attr.path] [flag1, flag2=value]
# with named groups 'name', 'callable' and 'flags'.
ENTRY_RE = re.compile(
    r'''(?P<name>([^\[]\S*))
    \s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
    \s*(\[\s*(?P<flags>[\w-]+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
    ''', re.VERBOSE)

718 

719 

def get_export_entry(specification):
    """Parse 'name = prefix[:suffix] [flags]' into an ExportEntry.

    Returns None if *specification* does not look like an export at all;
    raises DistlibException for malformed specifications (stray brackets,
    multiple colons in the callable part).
    """
    m = ENTRY_RE.search(specification)
    if not m:
        # Not an export - but stray brackets indicate a malformed attempt.
        if '[' in specification or ']' in specification:
            raise DistlibException("Invalid specification "
                                   "'%s'" % specification)
        return None
    d = m.groupdict()
    path = d['callable']
    colons = path.count(':')
    if colons == 0:
        prefix, suffix = path, None
    elif colons == 1:
        prefix, suffix = path.split(':')
    else:
        raise DistlibException("Invalid specification "
                               "'%s'" % specification)
    flags = d['flags']
    if flags is None:
        # Brackets present but no flags captured means malformed input.
        if '[' in specification or ']' in specification:
            raise DistlibException("Invalid specification "
                                   "'%s'" % specification)
        flags = []
    else:
        flags = [f.strip() for f in flags.split(',')]
    return ExportEntry(d['name'], prefix, suffix, flags)

749 

750 

def get_cache_base(suffix=None):
    """
    Return the default base location for distlib caches. If the directory does
    not exist, it is created. Use the suffix provided for the base directory,
    and default to '.distlib' if it isn't provided.

    On Windows, if LOCALAPPDATA is defined in the environment, then it is
    assumed to be a directory, and will be the parent directory of the result.
    On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home
    directory - using os.expanduser('~') - will be the parent directory of
    the result.

    The result is just the directory '.distlib' in the parent directory as
    determined above, or with the name specified with ``suffix``.
    """
    if suffix is None:
        suffix = '.distlib'
    if os.name == 'nt' and 'LOCALAPPDATA' in os.environ:
        # Environment variable lookup is case-insensitive on Windows.
        result = os.path.expandvars('$localappdata')
    else:
        # Assume posix, or old Windows
        result = os.path.expanduser('~')
    # we use 'isdir' instead of 'exists', because we want to
    # fail if there's a file with that name
    if os.path.isdir(result):
        usable = os.access(result, os.W_OK)
        if not usable:
            logger.warning('Directory exists but is not writable: %s', result)
    else:
        try:
            os.makedirs(result)
            usable = True
        except OSError:
            logger.warning('Unable to create %s', result, exc_info=True)
            usable = False
    if not usable:
        # Fall back to a fresh temporary directory rather than failing.
        result = tempfile.mkdtemp()
        logger.warning('Default location unusable, using %s', result)
    return os.path.join(result, suffix)

790 

791 

def path_to_cache_dir(path, use_abspath=True):
    """
    Convert an absolute path to a directory name for use in a cache.

    The algorithm used is:

    #. On Windows, any ``':'`` in the drive is replaced with ``'---'``.
    #. Any occurrence of ``os.sep`` is replaced with ``'--'``.
    #. ``'.cache'`` is appended.
    """
    if use_abspath:
        path = os.path.abspath(path)
    drive, rest = os.path.splitdrive(path)
    if drive:
        drive = drive.replace(':', '---')
    rest = rest.replace(os.sep, '--')
    return drive + rest + '.cache'

807 

808 

def ensure_slash(s):
    """Return *s* with a trailing '/' appended if not already present."""
    return s if s.endswith('/') else s + '/'

813 

814 

def parse_credentials(netloc):
    """Split a URL netloc into (username, password, host).

    Username and password are percent-decoded; either may be None if
    absent. The host part is returned with any credentials removed.
    """
    username = password = None
    if '@' in netloc:
        # Split on the LAST '@' so passwords containing '@' survive.
        creds, netloc = netloc.rsplit('@', 1)
        if ':' in creds:
            username, password = creds.split(':', 1)
        else:
            username = creds
        if username:
            username = unquote(username)
        if password:
            password = unquote(password)
    return username, password, netloc

828 

829 

def get_process_umask():
    """Return the current process umask without changing it permanently."""
    current = os.umask(0o22)  # os.umask only reports while setting...
    os.umask(current)         # ...so restore the original right away
    return current

834 

835 

def is_string_sequence(seq):
    """Return True if every element of *seq* is a string, else False.

    Stops inspecting at the first non-string element. An empty sequence
    raises AssertionError (preserved historical behaviour).
    """
    all_strings = True
    count = 0
    for item in seq:
        count += 1
        if not isinstance(item, string_types):
            all_strings = False
            break
    assert count != 0
    return all_strings

845 

846 

# Matches a 'name-version' filename prefix: the name is one or more
# letter/digit/underscore groups separated by '.' or '-'; the version is
# captured in group 3. Case-insensitive.
PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-'
                                      '([a-z0-9_.+-]+)', re.I)
# Matches a '-pyX.Y' (or '-pyX') Python version tag embedded in a filename.
PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)')

850 

851 

def split_filename(filename, project_name=None):
    """
    Extract name, version, python version from a filename (no extension)

    Return name, version, pyver or None
    """
    pyver = None
    filename = unquote(filename).replace(' ', '-')
    # Strip any trailing '-pyX.Y' tag first, remembering the version.
    m = PYTHON_VERSION.search(filename)
    if m:
        pyver = m.group(1)
        filename = filename[:m.start()]
    # If we know the project name, try to split on it directly.
    if project_name and len(filename) > len(project_name) + 1:
        m = re.match(re.escape(project_name) + r'\b', filename)
        if m:
            n = m.end()
            return filename[:n], filename[n + 1:], pyver
    # Otherwise fall back to the generic name-version pattern.
    m = PROJECT_NAME_AND_VERSION.match(filename)
    if m:
        return m.group(1), m.group(3), pyver
    return None

875 

876 

877# Allow spaces in name because of legacy dists like "Twisted Core" 

# Allow spaces in name because of legacy dists like "Twisted Core"
# Matches 'name (version)' as used in e.g. Provides-Dist metadata values.
NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*'
                             r'\(\s*(?P<ver>[^\s)]+)\)$')

880 

881 

def parse_name_and_version(p):
    """
    A utility method used to get name and version from a string.

    From e.g. a Provides-Dist value.

    :param p: A value in a form 'foo (1.0)'
    :return: The name and version as a tuple.
    :raises DistlibException: If *p* is not in the expected form.
    """
    m = NAME_VERSION_RE.match(p)
    if not m:
        raise DistlibException('Ill-formed name/version string: \'%s\'' % p)
    groups = m.groupdict()
    return groups['name'].strip().lower(), groups['ver']

896 

897 

def get_extras(requested, available):
    """Resolve a set of requested extras against the declared ones.

    '*' in *requested* selects every available extra; a '-name' entry
    removes one; the literal '-' is kept as-is. Requests for undeclared
    extras are logged as warnings but still honoured. Returns a set.
    """
    requested = set(requested or [])
    available = set(available or [])
    result = set()
    if '*' in requested:
        requested.discard('*')
        result |= available
    for item in requested:
        if item == '-':
            result.add(item)
        elif item.startswith('-'):
            unwanted = item[1:]
            if unwanted not in available:
                logger.warning('undeclared extra: %s' % unwanted)
            result.discard(unwanted)
        else:
            if item not in available:
                logger.warning('undeclared extra: %s' % item)
            result.add(item)
    return result

919 

920 

921# 

922# Extended metadata functionality 

923# 

924 

925 

def _get_external_data(url):
    """
    Fetch and decode JSON metadata from *url*.

    Returns the decoded object, or an empty dict on any failure (network
    error, non-JSON content type, or decoding problems); failures are
    logged rather than raised.
    """
    result = {}
    try:
        # urlopen might fail if it runs into redirections,
        # because of Python issue #13696. Fixed in locators
        # using a custom redirect handler.
        resp = urlopen(url)
        headers = resp.info()
        ct = headers.get('Content-Type')
        if not ct.startswith('application/json'):
            logger.debug('Unexpected response for JSON request: %s', ct)
        else:
            # Decode the byte stream as UTF-8 while parsing.
            reader = codecs.getreader('utf-8')(resp)
            # data = reader.read().decode('utf-8')
            # result = json.loads(data)
            result = json.load(reader)
    except Exception as e:
        logger.exception('Failed to get external data for %s: %s', url, e)
    return result

945 

946 

# Base URL of the external metadata service queried by the helpers below.
_external_data_base_url = 'https://www.red-dove.com/pypi/projects/'

948 

949 

def get_project_data(name):
    """Fetch the external 'project.json' metadata for project *name*.

    Returns a dict (empty on failure).
    """
    relative = '%s/%s/project.json' % (name[0].upper(), name)
    return _get_external_data(urljoin(_external_data_base_url, relative))

955 

956 

def get_package_data(name, version):
    """Fetch the external 'package-<version>.json' metadata for *name*.

    Returns a dict (empty on failure).
    """
    relative = '%s/%s/package-%s.json' % (name[0].upper(), name, version)
    return _get_external_data(urljoin(_external_data_base_url, relative))

961 

962 

class Cache(object):
    """
    A class implementing a cache for resources that need to live in the file system
    e.g. shared libraries. This class was moved from resources to here because it
    could be used by other modules, e.g. the wheel module.
    """

    def __init__(self, base):
        """
        Initialise an instance.

        :param base: The base directory where the cache should be located.
        """
        # we use 'isdir' instead of 'exists', because we want to
        # fail if there's a file with that name
        if not os.path.isdir(base):  # pragma: no cover
            os.makedirs(base)
        # Warn when group/other permission bits are set on the directory.
        if (os.stat(base).st_mode & 0o77) != 0:
            logger.warning('Directory \'%s\' is not private', base)
        self.base = os.path.abspath(os.path.normpath(base))

    def prefix_to_dir(self, prefix, use_abspath=True):
        """
        Converts a resource prefix to a directory name in the cache.
        """
        return path_to_cache_dir(prefix, use_abspath=use_abspath)

    def clear(self):
        """
        Clear the cache.
        """
        failures = []
        for name in os.listdir(self.base):
            full = os.path.join(self.base, name)
            try:
                if os.path.islink(full) or os.path.isfile(full):
                    os.remove(full)
                elif os.path.isdir(full):
                    shutil.rmtree(full)
            except Exception:
                # Report what we could not remove rather than raising.
                failures.append(full)
        return failures

1005 

1006 

class EventMixin(object):
    """
    A very simple publish/subscribe system.
    """

    def __init__(self):
        # Maps event name -> deque of subscriber callables.
        self._subscribers = {}

    def add(self, event, subscriber, append=True):
        """
        Add a subscriber for an event.

        :param event: The name of an event.
        :param subscriber: The subscriber to be added (and called when the
                           event is published).
        :param append: Whether to append or prepend the subscriber to an
                       existing subscriber list for the event.
        """
        subscribers = self._subscribers
        if event in subscribers:
            queue = subscribers[event]
            if append:
                queue.append(subscriber)
            else:
                queue.appendleft(subscriber)
        else:
            subscribers[event] = deque([subscriber])

    def remove(self, event, subscriber):
        """
        Remove a subscriber for an event.

        :param event: The name of an event.
        :param subscriber: The subscriber to be removed.
        """
        subscribers = self._subscribers
        if event not in subscribers:
            raise ValueError('No subscribers: %r' % event)
        subscribers[event].remove(subscriber)

    def get_subscribers(self, event):
        """
        Return an iterator for the subscribers for an event.
        :param event: The event to return subscribers for.
        """
        return iter(self._subscribers.get(event, ()))

    def publish(self, event, *args, **kwargs):
        """
        Publish a event and return a list of values returned by its
        subscribers.

        :param event: The event to publish.
        :param args: The positional arguments to pass to the event's
                     subscribers.
        :param kwargs: The keyword arguments to pass to the event's
                       subscribers.
        """
        results = []
        for subscriber in self.get_subscribers(event):
            try:
                value = subscriber(event, *args, **kwargs)
            except Exception:
                # A failing subscriber contributes None rather than aborting.
                logger.exception('Exception during event publication')
                value = None
            results.append(value)
        logger.debug('publish %s: args = %s, kwargs = %s, result = %s', event, args, kwargs, results)
        return results

1075 

1076 

1077# 

1078# Simple sequencing 

1079# 

class Sequencer(object):
    """
    Records 'pred must come before succ' ordering constraints between
    hashable steps and derives orderings / strongly connected components
    from them.
    """

    def __init__(self):
        self._preds = {}  # successor -> set of its predecessors
        self._succs = {}  # predecessor -> set of its successors
        self._nodes = set()  # nodes with no preds/succs

    def add_node(self, node):
        # Register a standalone node with no ordering constraints.
        self._nodes.add(node)

    def remove_node(self, node, edges=False):
        """
        Remove *node* from the standalone set; with edges=True, also drop
        every constraint mentioning it.
        """
        if node in self._nodes:
            self._nodes.remove(node)
        if edges:
            for p in set(self._preds.get(node, ())):
                self.remove(p, node)
            for s in set(self._succs.get(node, ())):
                self.remove(node, s)
            # Remove empties
            for k, v in list(self._preds.items()):
                if not v:
                    del self._preds[k]
            for k, v in list(self._succs.items()):
                if not v:
                    del self._succs[k]

    def add(self, pred, succ):
        """Record that *pred* must come before *succ*."""
        assert pred != succ
        self._preds.setdefault(succ, set()).add(pred)
        self._succs.setdefault(pred, set()).add(succ)

    def remove(self, pred, succ):
        """
        Remove the pred-before-succ constraint.

        :raises ValueError: if the constraint was never recorded.
        """
        assert pred != succ
        try:
            preds = self._preds[succ]
            succs = self._succs[pred]
        except KeyError:  # pragma: no cover
            raise ValueError('%r not a successor of anything' % succ)
        try:
            preds.remove(pred)
            succs.remove(succ)
        except KeyError:  # pragma: no cover
            raise ValueError('%r not a successor of %r' % (succ, pred))

    def is_step(self, step):
        # A step is known if it appears in any constraint or as a node.
        return (step in self._preds or step in self._succs or step in self._nodes)

    def get_steps(self, final):
        """
        Return an iterable of the steps needed to reach *final*, ordered
        so that recorded predecessors come first.
        """
        if not self.is_step(final):
            raise ValueError('Unknown: %r' % final)
        result = []
        todo = []
        seen = set()
        todo.append(final)
        while todo:
            step = todo.pop(0)
            if step in seen:
                # if a step was already seen,
                # move it to the end (so it will appear earlier
                # when reversed on return) ... but not for the
                # final step, as that would be confusing for
                # users
                if step != final:
                    result.remove(step)
                    result.append(step)
            else:
                seen.add(step)
                result.append(step)
                preds = self._preds.get(step, ())
                todo.extend(preds)
        return reversed(result)

    @property
    def strong_connections(self):
        # Tarjan's algorithm, recursive formulation; each returned tuple
        # is one strongly connected component of the successor graph.
        # http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
        index_counter = [0]
        stack = []
        lowlinks = {}
        index = {}
        result = []

        graph = self._succs

        def strongconnect(node):
            # set the depth index for this node to the smallest unused index
            index[node] = index_counter[0]
            lowlinks[node] = index_counter[0]
            index_counter[0] += 1
            stack.append(node)

            # Consider successors
            try:
                successors = graph[node]
            except Exception:
                successors = []
            for successor in successors:
                if successor not in lowlinks:
                    # Successor has not yet been visited
                    strongconnect(successor)
                    lowlinks[node] = min(lowlinks[node], lowlinks[successor])
                elif successor in stack:
                    # the successor is in the stack and hence in the current
                    # strongly connected component (SCC)
                    lowlinks[node] = min(lowlinks[node], index[successor])

            # If `node` is a root node, pop the stack and generate an SCC
            if lowlinks[node] == index[node]:
                connected_component = []

                while True:
                    successor = stack.pop()
                    connected_component.append(successor)
                    if successor == node:
                        break
                component = tuple(connected_component)
                # storing the result
                result.append(component)

        for node in graph:
            if node not in lowlinks:
                strongconnect(node)

        return result

    @property
    def dot(self):
        """Graphviz DOT source describing the recorded constraints."""
        result = ['digraph G {']
        for succ in self._preds:
            preds = self._preds[succ]
            for pred in preds:
                result.append(' %s -> %s;' % (pred, succ))
        for node in self._nodes:
            result.append(' %s;' % node)
        result.append('}')
        return '\n'.join(result)

1215 

1216 

1217# 

1218# Unarchiving functionality for zip, tar, tgz, tbz, whl 

1219# 

1220 

# Archive filename suffixes that unarchive() knows how to unpack.
ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz', '.whl')

1222 

1223 

def unarchive(archive_filename, dest_dir, format=None, check=True):
    """
    Unpack an archive into a destination directory.

    :param archive_filename: Path to a .zip/.whl/.tar/.tar.gz/.tgz/
                             .tar.bz2/.tbz archive.
    :param dest_dir: The directory to extract into.
    :param format: One of 'zip', 'tgz', 'tbz' or 'tar', or None to infer
                   the format from the filename extension.
    :param check: If true, validate member names so that no member would
                  be extracted outside dest_dir.
    :raises ValueError: If the format cannot be determined, or a member
                        path would escape dest_dir.
    """

    def check_path(path):
        # Guard against "zip slip": a member like "../../etc/passwd" must
        # not resolve to a location outside dest_dir.
        if not isinstance(path, text_type):
            path = path.decode('utf-8')
        p = os.path.abspath(os.path.join(dest_dir, path))
        # Compare with the separator appended so that a sibling directory
        # such as "<dest_dir>-evil" is rejected, and allow p == dest_dir
        # itself (e.g. a '.' member) instead of crashing with IndexError
        # as the old p[plen] check did.
        if p != dest_dir and not p.startswith(dest_dir + os.sep):
            raise ValueError('path outside destination: %r' % p)

    dest_dir = os.path.abspath(dest_dir)
    archive = None
    if format is None:
        if archive_filename.endswith(('.zip', '.whl')):
            format = 'zip'
        elif archive_filename.endswith(('.tar.gz', '.tgz')):
            format = 'tgz'
        elif archive_filename.endswith(('.tar.bz2', '.tbz')):
            format = 'tbz'
        elif archive_filename.endswith('.tar'):
            format = 'tar'
        else:  # pragma: no cover
            raise ValueError('Unknown format for %r' % archive_filename)
    # Derive the tarfile open mode from the format here (not only in the
    # inference branch above), so an explicitly passed tar format no longer
    # hits an unbound 'mode' NameError.
    mode = {'tgz': 'r:gz', 'tbz': 'r:bz2', 'tar': 'r'}.get(format)
    try:
        if format == 'zip':
            archive = ZipFile(archive_filename, 'r')
            if check:
                names = archive.namelist()
                for name in names:
                    check_path(name)
        else:
            archive = tarfile.open(archive_filename, mode)
            if check:
                names = archive.getnames()
                for name in names:
                    check_path(name)
        if format != 'zip' and sys.version_info[0] < 3:
            # See Python issue 17153. If the dest path contains Unicode,
            # tarfile extraction fails on Python 2.x if a member path name
            # contains non-ASCII characters - it leads to an implicit
            # bytes -> unicode conversion using ASCII to decode.
            for tarinfo in archive.getmembers():
                if not isinstance(tarinfo.name, text_type):
                    tarinfo.name = tarinfo.name.decode('utf-8')

        # Limit extraction of dangerous items, if this Python
        # allows it easily. If not, just trust the input.
        # See: https://docs.python.org/3/library/tarfile.html#extraction-filters
        def extraction_filter(member, path):
            """Run tarfile.tar_filter, but raise the expected ValueError"""
            # This is only called if the current Python has tarfile filters
            try:
                return tarfile.tar_filter(member, path)
            except tarfile.FilterError as exc:
                raise ValueError(str(exc))

        archive.extraction_filter = extraction_filter

        archive.extractall(dest_dir)

    finally:
        if archive:
            archive.close()

1290 

1291 

def zip_dir(directory):
    """Zip a directory tree and return it as an in-memory BytesIO object."""
    buf = io.BytesIO()
    prefix_len = len(directory)
    with ZipFile(buf, "w") as zf:
        for root, _subdirs, filenames in os.walk(directory):
            # Archive names are relative to the given directory.
            relative = root[prefix_len:]
            for filename in filenames:
                source = os.path.join(root, filename)
                zf.write(source, os.path.join(relative, filename))
    return buf

1304 

1305 

1306# 

1307# Simple progress bar 

1308# 

1309 

# Magnitude prefixes used by Progress.speed to format rates (B/s, KB/s, ...).
UNITS = ('', 'K', 'M', 'G', 'T', 'P')

1311 

1312 

class Progress(object):
    """
    Tracks progress of a long-running operation: current/min/max values,
    elapsed time, and derived quantities (percentage, ETA, transfer speed).
    """
    unknown = 'UNKNOWN'

    def __init__(self, minval=0, maxval=100):
        """
        :param minval: The starting value.
        :param maxval: The final value, or None when not known in advance.
        """
        assert maxval is None or maxval >= minval
        self.min = self.cur = minval
        self.max = maxval
        self.started = None  # wall-clock time of the first update
        self.elapsed = 0  # seconds since the first update
        self.done = False

    def update(self, curval):
        """Set the current value and refresh the elapsed time."""
        assert self.min <= curval
        assert self.max is None or curval <= self.max
        self.cur = curval
        now = time.time()
        if self.started is None:
            self.started = now
        else:
            self.elapsed = now - self.started

    def increment(self, incr):
        """Advance the current value by a non-negative amount."""
        assert incr >= 0
        self.update(self.cur + incr)

    def start(self):
        """Begin tracking; returns self so calls can be chained."""
        self.update(self.min)
        return self

    def stop(self):
        """Mark the operation as finished."""
        if self.max is not None:
            self.update(self.max)
        self.done = True

    @property
    def maximum(self):
        # The maximum value, or the string 'UNKNOWN' when open-ended.
        return self.unknown if self.max is None else self.max

    @property
    def percentage(self):
        """Progress as a fixed-width string such as ' 42 %' (or ' ?? %')."""
        if self.done:
            result = '100 %'
        elif self.max is None:
            result = ' ?? %'
        else:
            v = 100.0 * (self.cur - self.min) / (self.max - self.min)
            result = '%3d %%' % v
        return result

    def format_duration(self, duration):
        """Format seconds as HH:MM:SS, or '??:??:??' when not estimable."""
        if (duration <= 0) and self.max is None or self.cur == self.min:
            result = '??:??:??'
        # elif duration < 1:
        # result = '--:--:--'
        else:
            result = time.strftime('%H:%M:%S', time.gmtime(duration))
        return result

    @property
    def ETA(self):
        """Estimated time remaining (or total elapsed time when done)."""
        if self.done:
            prefix = 'Done'
            t = self.elapsed
            # import pdb; pdb.set_trace()
        else:
            prefix = 'ETA '
            if self.max is None:
                t = -1
            elif self.elapsed == 0 or (self.cur == self.min):
                t = 0
            else:
                # import pdb; pdb.set_trace()
                # Linear extrapolation from the progress made so far.
                t = float(self.max - self.min)
                t /= self.cur - self.min
                t = (t - 1) * self.elapsed
        return '%s: %s' % (prefix, self.format_duration(t))

    @property
    def speed(self):
        """Average rate as a human-readable string, e.g. '12 KB/s'."""
        if self.elapsed == 0:
            result = 0.0
        else:
            result = (self.cur - self.min) / self.elapsed
        # Scale down by 1000 per step through the UNITS prefixes.
        for unit in UNITS:
            if result < 1000:
                break
            result /= 1000.0
        return '%d %sB/s' % (result, unit)

1401 

1402 

1403# 

1404# Glob functionality 

1405# 

1406 

# {opt1,opt2,...} alternation groups in extended glob patterns.
RICH_GLOB = re.compile(r'\{([^}]*)\}')
# '**' adjacent to anything other than a path separator or set syntax.
_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]')
# A '}' with no preceding '{', or a '{' that is never closed.
_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$')

1410 

1411 

def iglob(path_glob):
    """Extended globbing function that supports ** and {opt1,opt2,opt3}."""
    # Validate the pattern before delegating to the recursive generator.
    if _CHECK_RECURSIVE_GLOB.search(path_glob):
        raise ValueError('invalid glob %r: recursive glob "**" must be used alone' % path_glob)
    if _CHECK_MISMATCH_SET.search(path_glob):
        raise ValueError("invalid glob %r: mismatching set marker '{' or '}'" % path_glob)
    return _iglob(path_glob)

1421 

1422 

def _iglob(path_glob):
    """
    Generator implementing iglob()'s extended syntax.

    Expands one {a,b,c} alternation group per recursion level, then falls
    back to the standard library glob, with '**' expanded via os.walk.
    """
    rich_path_glob = RICH_GLOB.split(path_glob, 1)
    if len(rich_path_glob) > 1:
        assert len(rich_path_glob) == 3, rich_path_glob
        # Renamed from 'set' to avoid shadowing the builtin.
        prefix, choices, suffix = rich_path_glob
        for choice in choices.split(','):
            for path in _iglob(''.join((prefix, choice, suffix))):
                yield path
    else:
        if '**' not in path_glob:
            for item in std_iglob(path_glob):
                yield item
        else:
            prefix, radical = path_glob.split('**', 1)
            if prefix == '':
                prefix = '.'
            if radical == '':
                radical = '*'
            else:
                # we support both '/' and '\\' after '**'
                radical = radical.lstrip('/')
                radical = radical.lstrip('\\')
            # '_subdirs' renamed from 'dir' to avoid shadowing the builtin.
            for path, _subdirs, _files in os.walk(prefix):
                path = os.path.normpath(path)
                for fn in _iglob(os.path.join(path, radical)):
                    yield fn

1449 

1450 

if ssl:
    from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname, CertificateError)

    #
    # HTTPSConnection which verifies certificates/matches domains
    #

    class HTTPSConnection(httplib.HTTPSConnection):
        ca_certs = None  # set this to the path to the certs file (.pem)
        check_domain = True  # only used if ca_certs is not None

        # noinspection PyPropertyAccess
        def connect(self):
            # Establish the TCP connection ourselves so we can wrap it in
            # an SSL context configured from ca_certs/check_domain.
            sock = socket.create_connection((self.host, self.port), self.timeout)
            if getattr(self, '_tunnel_host', False):
                self.sock = sock
                self._tunnel()

            # NOTE(review): PROTOCOL_SSLv23 is the legacy "negotiate best
            # available" constant (a deprecated alias of PROTOCOL_TLS on
            # newer Pythons) -- confirm whether a modern default applies.
            context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
            if hasattr(ssl, 'OP_NO_SSLv2'):
                context.options |= ssl.OP_NO_SSLv2
            if getattr(self, 'cert_file', None):
                context.load_cert_chain(self.cert_file, self.key_file)
            kwargs = {}
            if self.ca_certs:
                # Require and verify the server certificate against the
                # supplied CA bundle; pass SNI when supported.
                context.verify_mode = ssl.CERT_REQUIRED
                context.load_verify_locations(cafile=self.ca_certs)
                if getattr(ssl, 'HAS_SNI', False):
                    kwargs['server_hostname'] = self.host

            self.sock = context.wrap_socket(sock, **kwargs)
            if self.ca_certs and self.check_domain:
                try:
                    match_hostname(self.sock.getpeercert(), self.host)
                    logger.debug('Host verified: %s', self.host)
                except CertificateError:  # pragma: no cover
                    # Tear the connection down before propagating the error.
                    self.sock.shutdown(socket.SHUT_RDWR)
                    self.sock.close()
                    raise

    class HTTPSHandler(BaseHTTPSHandler):
        """A urllib HTTPS handler which uses the verifying connection above."""

        def __init__(self, ca_certs, check_domain=True):
            BaseHTTPSHandler.__init__(self)
            self.ca_certs = ca_certs
            self.check_domain = check_domain

        def _conn_maker(self, *args, **kwargs):
            """
            This is called to create a connection instance. Normally you'd
            pass a connection class to do_open, but it doesn't actually check for
            a class, and just expects a callable. As long as we behave just as a
            constructor would have, we should be OK. If it ever changes so that
            we *must* pass a class, we'll create an UnsafeHTTPSConnection class
            which just sets check_domain to False in the class definition, and
            choose which one to pass to do_open.
            """
            result = HTTPSConnection(*args, **kwargs)
            if self.ca_certs:
                result.ca_certs = self.ca_certs
                result.check_domain = self.check_domain
            return result

        def https_open(self, req):
            try:
                return self.do_open(self._conn_maker, req)
            except URLError as e:
                # Surface verification failures as CertificateError for
                # clearer diagnostics than a generic URLError.
                if 'certificate verify failed' in str(e.reason):
                    raise CertificateError('Unable to verify server certificate '
                                           'for %s' % req.host)
                else:
                    raise

    #
    # To prevent against mixing HTTP traffic with HTTPS (examples: A Man-In-The-
    # Middle proxy using HTTP listens on port 443, or an index mistakenly serves
    # HTML containing a http://xyz link when it should be https://xyz),
    # you can use the following handler class, which does not allow HTTP traffic.
    #
    # It works by inheriting from HTTPHandler - so build_opener won't add a
    # handler for HTTP itself.
    #
    class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler):

        def http_open(self, req):
            raise URLError('Unexpected HTTP request on what should be a secure '
                           'connection: %s' % req)

1538 

1539 

1540# 

1541# XML-RPC with timeouts 

1542# 

class Transport(xmlrpclib.Transport):
    """An XML-RPC HTTP transport constructed with a timeout value."""

    def __init__(self, timeout, use_datetime=0):
        self.timeout = timeout
        xmlrpclib.Transport.__init__(self, use_datetime)

    def make_connection(self, host):
        h, eh, x509 = self.get_host_info(host)
        # Reuse the cached connection when it targets the same host.
        if not self._connection or host != self._connection[0]:
            self._extra_headers = eh
            # NOTE(review): self.timeout is stored but not passed to
            # HTTPConnection here -- confirm whether that is intended.
            self._connection = host, httplib.HTTPConnection(h)
        return self._connection[1]

1555 

1556 

if ssl:

    class SafeTransport(xmlrpclib.SafeTransport):
        """An XML-RPC HTTPS transport whose connections use a timeout."""

        def __init__(self, timeout, use_datetime=0):
            self.timeout = timeout
            xmlrpclib.SafeTransport.__init__(self, use_datetime)

        def make_connection(self, host):
            h, eh, kwargs = self.get_host_info(host)
            if not kwargs:
                kwargs = {}
            # Propagate the configured timeout to the HTTPS connection.
            kwargs['timeout'] = self.timeout
            # Reuse the cached connection when it targets the same host.
            if not self._connection or host != self._connection[0]:
                self._extra_headers = eh
                self._connection = host, httplib.HTTPSConnection(h, None, **kwargs)
            return self._connection[1]

1574 

1575 

class ServerProxy(xmlrpclib.ServerProxy):
    """An xmlrpclib.ServerProxy which accepts a 'timeout' keyword argument."""

    def __init__(self, uri, **kwargs):
        self.timeout = timeout = kwargs.pop('timeout', None)
        # The above classes only come into play if a timeout
        # is specified
        if timeout is not None:
            # scheme = splittype(uri)  # deprecated as of Python 3.8
            scheme = urlparse(uri)[0]
            use_datetime = kwargs.get('use_datetime', 0)
            # Pick the transport matching the URI scheme.
            if scheme == 'https':
                tcls = SafeTransport
            else:
                tcls = Transport
            kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime)
            self.transport = t
        xmlrpclib.ServerProxy.__init__(self, uri, **kwargs)

1593 

1594 

1595# 

1596# CSV functionality. This is provided because on 2.x, the csv module can't 

1597# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files. 

1598# 

1599 

1600 

1601def _csv_open(fn, mode, **kwargs): 

1602 if sys.version_info[0] < 3: 

1603 mode += 'b' 

1604 else: 

1605 kwargs['newline'] = '' 

1606 # Python 3 determines encoding from locale. Force 'utf-8' 

1607 # file encoding to match other forced utf-8 encoding 

1608 kwargs['encoding'] = 'utf-8' 

1609 return open(fn, mode, **kwargs) 

1610 

1611 

class CSVBase(object):
    """
    Shared plumbing for CSVReader/CSVWriter: common csv dialect options
    and context-manager support that closes the underlying stream.
    """

    # Native str is required because the 2.x csv API rejects Unicode here.
    defaults = {
        'delimiter': str(','),
        'quotechar': str('"'),
        'lineterminator': str('\n'),
    }

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        self.stream.close()

1624 

1625 

class CSVReader(CSVBase):
    """Iterate rows from a CSV file path or binary stream as text values."""

    def __init__(self, **kwargs):
        if 'stream' in kwargs:
            stream = kwargs['stream']
            if sys.version_info[0] >= 3:
                # The csv module needs a text stream on Python 3.
                stream = codecs.getreader('utf-8')(stream)
            self.stream = stream
        else:
            self.stream = _csv_open(kwargs['path'], 'r')
        self.reader = csv.reader(self.stream, **self.defaults)

    def __iter__(self):
        return self

    def next(self):
        row = next(self.reader)
        if sys.version_info[0] < 3:
            # On Python 2, decode byte cells to text.
            row = [cell if isinstance(cell, text_type) else cell.decode('utf-8')
                   for cell in row]
        return row

    __next__ = next

1651 

1652 

class CSVWriter(CSVBase):
    """Write rows to a CSV file, encoding text values on Python 2."""

    def __init__(self, fn, **kwargs):
        self.stream = _csv_open(fn, 'w')
        self.writer = csv.writer(self.stream, **self.defaults)

    def writerow(self, row):
        if sys.version_info[0] < 3:
            # The 2.x csv module needs byte strings.
            row = [cell.encode('utf-8') if isinstance(cell, text_type) else cell
                   for cell in row]
        self.writer.writerow(row)

1668 

1669 

1670# 

1671# Configurator functionality 

1672# 

1673 

1674 

class Configurator(BaseConfigurator):
    """
    A configurator which can instantiate objects described by dicts (using
    the logging-config style '()' / '[]' / '.' keys) and which supports an
    additional 'inc://' converter for including JSON files.
    """

    value_converters = dict(BaseConfigurator.value_converters)
    value_converters['inc'] = 'inc_convert'

    def __init__(self, config, base=None):
        super(Configurator, self).__init__(config)
        # Base directory for resolving relative inc:// paths.
        self.base = base or os.getcwd()

    def configure_custom(self, config):
        """
        Instantiate the object described by *config*: the '()' key names a
        callable, '[]' gives positional args, '.' gives attributes to set
        on the result, and remaining valid-identifier keys become kwargs.
        """

        def convert(o):
            # Recursively convert containers, instantiating any nested
            # '()' descriptions along the way.
            if isinstance(o, (list, tuple)):
                result = type(o)([convert(i) for i in o])
            elif isinstance(o, dict):
                if '()' in o:
                    result = self.configure_custom(o)
                else:
                    result = {}
                    for k in o:
                        result[k] = convert(o[k])
            else:
                result = self.convert(o)
            return result

        c = config.pop('()')
        if not callable(c):
            c = self.resolve(c)
        props = config.pop('.', None)
        # Check for valid identifiers
        args = config.pop('[]', ())
        if args:
            args = tuple([convert(o) for o in args])
        items = [(k, convert(config[k])) for k in config if valid_ident(k)]
        kwargs = dict(items)
        result = c(*args, **kwargs)
        if props:
            for n, v in props.items():
                setattr(result, n, convert(v))
        return result

    def __getitem__(self, key):
        result = self.config[key]
        if isinstance(result, dict) and '()' in result:
            # Instantiate lazily and cache the constructed value.
            self.config[key] = result = self.configure_custom(result)
        return result

    def inc_convert(self, value):
        """Default converter for the inc:// protocol."""
        if not os.path.isabs(value):
            value = os.path.join(self.base, value)
        with codecs.open(value, 'r', encoding='utf-8') as f:
            result = json.load(f)
        return result

1729 

1730 

class SubprocessMixin(object):
    """
    Mixin for running subprocesses and capturing their output
    """

    def __init__(self, verbose=False, progress=None):
        # progress, when given, is a callable(line, context) invoked for
        # each line of output; verbose controls echoing to stderr.
        self.verbose = verbose
        self.progress = progress

    def reader(self, stream, context):
        """
        Read lines from a subprocess' output stream and either pass to a progress
        callable (if specified) or write progress information to sys.stderr.
        """
        progress = self.progress
        verbose = self.verbose
        while True:
            s = stream.readline()
            if not s:
                break
            if progress is not None:
                progress(s, context)
            else:
                if not verbose:
                    # Minimal feedback: one dot per output line.
                    sys.stderr.write('.')
                else:
                    sys.stderr.write(s.decode('utf-8'))
                sys.stderr.flush()
        stream.close()

    def run_command(self, cmd, **kwargs):
        """
        Run *cmd*, draining stdout/stderr in background threads.

        :param cmd: The command, as passed to subprocess.Popen.
        :param kwargs: Additional keyword arguments for subprocess.Popen.
        :return: The completed Popen instance.
        """
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
        # Drain both pipes concurrently so neither can fill and deadlock.
        t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout'))
        t1.start()
        t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr'))
        t2.start()
        p.wait()
        t1.join()
        t2.join()
        if self.progress is not None:
            self.progress('done.', 'main')
        elif self.verbose:
            sys.stderr.write('done.\n')
        return p

1775 

1776 

def normalize_name(name):
    """Normalize a python package name a la PEP 503"""
    # https://www.python.org/dev/peps/pep-0503/#normalized-names
    collapsed = re.sub('[-_.]+', '-', name)
    return collapsed.lower()

1781 

1782 

1783# def _get_pypirc_command(): 

1784# """ 

1785# Get the distutils command for interacting with PyPI configurations. 

1786# :return: the command. 

1787# """ 

1788# from distutils.core import Distribution 

1789# from distutils.config import PyPIRCCommand 

1790# d = Distribution() 

1791# return PyPIRCCommand(d) 

1792 

1793 

class PyPIRCFile(object):
    """
    Reader/writer for the ~/.pypirc configuration file holding package
    index credentials, supporting both the 'distutils' multi-server
    format and the old 'server-login' format.
    """

    DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/'
    DEFAULT_REALM = 'pypi'

    def __init__(self, fn=None, url=None):
        """
        :param fn: Path of the configuration file (defaults to ~/.pypirc).
        :param url: Repository URL used to select a matching server entry.
        """
        if fn is None:
            fn = os.path.join(os.path.expanduser('~'), '.pypirc')
        self.filename = fn
        self.url = url

    def read(self):
        """
        Read the configuration and return a dict with 'username',
        'password', 'repository', 'server' and 'realm' keys for the
        selected server (an empty dict when nothing matches).
        """
        result = {}

        if os.path.exists(self.filename):
            repository = self.url or self.DEFAULT_REPOSITORY

            config = configparser.RawConfigParser()
            config.read(self.filename)
            sections = config.sections()
            if 'distutils' in sections:
                # let's get the list of servers
                index_servers = config.get('distutils', 'index-servers')
                _servers = [server.strip() for server in index_servers.split('\n') if server.strip() != '']
                if _servers == []:
                    # nothing set, let's try to get the default pypi
                    if 'pypi' in sections:
                        _servers = ['pypi']
                else:
                    # NOTE(review): 'result' is rebuilt on each iteration, so
                    # in effect the last server that survives the filtering
                    # below wins -- confirm this precedence is intended.
                    for server in _servers:
                        result = {'server': server}
                        result['username'] = config.get(server, 'username')

                        # optional params
                        for key, default in (('repository', self.DEFAULT_REPOSITORY), ('realm', self.DEFAULT_REALM),
                                             ('password', None)):
                            if config.has_option(server, key):
                                result[key] = config.get(server, key)
                            else:
                                result[key] = default

                        # work around people having "repository" for the "pypi"
                        # section of their config set to the HTTP (rather than
                        # HTTPS) URL
                        if (server == 'pypi' and repository in (self.DEFAULT_REPOSITORY, 'pypi')):
                            result['repository'] = self.DEFAULT_REPOSITORY
                        elif (result['server'] != repository and result['repository'] != repository):
                            # This entry doesn't match the requested
                            # repository; discard it.
                            result = {}
            elif 'server-login' in sections:
                # old format
                server = 'server-login'
                if config.has_option(server, 'repository'):
                    repository = config.get(server, 'repository')
                else:
                    repository = self.DEFAULT_REPOSITORY
                result = {
                    'username': config.get(server, 'username'),
                    'password': config.get(server, 'password'),
                    'repository': repository,
                    'server': server,
                    'realm': self.DEFAULT_REALM
                }
        return result

    def update(self, username, password):
        """Create or update the [pypi] section with the given credentials."""
        # import pdb; pdb.set_trace()
        config = configparser.RawConfigParser()
        fn = self.filename
        config.read(fn)
        if not config.has_section('pypi'):
            config.add_section('pypi')
        config.set('pypi', 'username', username)
        config.set('pypi', 'password', password)
        with open(fn, 'w') as f:
            config.write(f)

1869 

1870 

def _load_pypirc(index):
    """
    Read the PyPI access configuration as supported by distutils.
    """
    pypirc = PyPIRCFile(url=index.url)
    return pypirc.read()

1876 

1877 

def _store_pypirc(index):
    """Save *index*'s username/password into the default ~/.pypirc file."""
    pypirc = PyPIRCFile()
    pypirc.update(index.username, index.password)

1880 

1881 

1882# 

1883# get_platform()/get_host_platform() copied from Python 3.10.a0 source, with some minor 

1884# tweaks 

1885# 

1886 

1887 

def get_host_platform():
    """Return a string that identifies the current platform. This is used mainly to
    distinguish platform-specific build directories and platform-specific built
    distributions. Typically includes the OS name and version and the
    architecture (as supplied by 'os.uname()'), although the exact information
    included depends on the OS; eg. on Linux, the kernel version isn't
    particularly important.

    Examples of returned values:
        linux-i586
        linux-alpha (?)
        solaris-2.6-sun4u

    Windows will return one of:
        win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
        win32 (all others - specifically, sys.platform is returned)

    For other non-POSIX platforms, currently just returns 'sys.platform'.

    """
    if os.name == 'nt':
        # Windows: derive the architecture from the interpreter build string.
        if 'amd64' in sys.version.lower():
            return 'win-amd64'
        if '(arm)' in sys.version.lower():
            return 'win-arm32'
        if '(arm64)' in sys.version.lower():
            return 'win-arm64'
        return sys.platform

    # Set for cross builds explicitly
    if "_PYTHON_HOST_PLATFORM" in os.environ:
        return os.environ["_PYTHON_HOST_PLATFORM"]

    if os.name != 'posix' or not hasattr(os, 'uname'):
        # XXX what about the architecture? NT is Intel or Alpha,
        # Mac OS is M68k or PPC, etc.
        return sys.platform

    # Try to distinguish various flavours of Unix

    (osname, host, release, version, machine) = os.uname()

    # Convert the OS name to lowercase, remove '/' characters, and translate
    # spaces (for "Power Macintosh")
    osname = osname.lower().replace('/', '')
    machine = machine.replace(' ', '_').replace('/', '-')

    if osname[:5] == 'linux':
        # At least on Linux/Intel, 'machine' is the processor --
        # i386, etc.
        # XXX what about Alpha, SPARC, etc?
        return "%s-%s" % (osname, machine)

    elif osname[:5] == 'sunos':
        if release[0] >= '5':  # SunOS 5 == Solaris 2
            osname = 'solaris'
            release = '%d.%s' % (int(release[0]) - 3, release[2:])
            # We can't use 'platform.architecture()[0]' because a
            # bootstrap problem. We use a dict to get an error
            # if some suspicious happens.
            bitness = {2147483647: '32bit', 9223372036854775807: '64bit'}
            machine += '.%s' % bitness[sys.maxsize]
        # fall through to standard osname-release-machine representation
    elif osname[:3] == 'aix':
        from _aix_support import aix_platform
        return aix_platform()
    elif osname[:6] == 'cygwin':
        osname = 'cygwin'
        # Keep only the leading digits-and-dots portion of the release.
        rel_re = re.compile(r'[\d.]+', re.ASCII)
        m = rel_re.match(release)
        if m:
            release = m.group()
    elif osname[:6] == 'darwin':
        import _osx_support
        try:
            from distutils import sysconfig
        except ImportError:
            # distutils was removed in Python 3.12; fall back to sysconfig.
            import sysconfig
        osname, release, machine = _osx_support.get_platform_osx(sysconfig.get_config_vars(), osname, release, machine)

    return '%s-%s-%s' % (osname, release, machine)

1969 

1970 

# Maps the VSCMD_ARG_TGT_ARCH cross-compilation target (set by the Visual
# Studio developer environment) to the corresponding platform tag.
_TARGET_TO_PLAT = {
    'x86': 'win32',
    'x64': 'win-amd64',
    'arm': 'win-arm32',
}

1976 

1977 

def get_platform():
    """Return the target platform tag, honouring a Windows cross-compilation
    target declared via VSCMD_ARG_TGT_ARCH."""
    if os.name == 'nt':
        target = os.environ.get('VSCMD_ARG_TGT_ARCH')
        if target in _TARGET_TO_PLAT:
            return _TARGET_TO_PLAT[target]
    return get_host_platform()