Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/pip/_vendor/distlib/util.py: 16%
1255 statements
« prev ^ index » next coverage.py v7.2.7, created at 2023-06-07 06:48 +0000
« prev ^ index » next coverage.py v7.2.7, created at 2023-06-07 06:48 +0000
1#
2# Copyright (C) 2012-2021 The Python Software Foundation.
3# See LICENSE.txt and CONTRIBUTORS.txt.
4#
5import codecs
6from collections import deque
7import contextlib
8import csv
9from glob import iglob as std_iglob
10import io
11import json
12import logging
13import os
14import py_compile
15import re
16import socket
17try:
18 import ssl
19except ImportError: # pragma: no cover
20 ssl = None
21import subprocess
22import sys
23import tarfile
24import tempfile
25import textwrap
27try:
28 import threading
29except ImportError: # pragma: no cover
30 import dummy_threading as threading
31import time
33from . import DistlibException
34from .compat import (string_types, text_type, shutil, raw_input, StringIO,
35 cache_from_source, urlopen, urljoin, httplib, xmlrpclib,
36 splittype, HTTPHandler, BaseConfigurator, valid_ident,
37 Container, configparser, URLError, ZipFile, fsdecode,
38 unquote, urlparse)
40logger = logging.getLogger(__name__)
#
# Requirement parsing code as per PEP 508
#

# A distribution or marker-variable name, with trailing whitespace consumed.
IDENTIFIER = re.compile(r'^([\w\.-]+)\s*')
# A version string; also allows '*' (wildcards) and '+' (local version labels).
VERSION_IDENTIFIER = re.compile(r'^([\w\.*+-]+)\s*')
# Version comparison operators: <, <=, >, >=, ==, ===, ~=, !=.
COMPARE_OP = re.compile(r'^(<=?|>=?|={2,3}|[~!]=)\s*')
# Marker comparison operators: the version operators plus 'in' / 'not in'.
MARKER_OP = re.compile(r'^((<=?)|(>=?)|={2,3}|[~!]=|in|not\s+in)\s*')
OR = re.compile(r'^or\b\s*')
AND = re.compile(r'^and\b\s*')
# A run of non-whitespace characters (used e.g. for the URI after '@').
NON_SPACE = re.compile(r'(\S+)\s*')
# Characters permitted inside a quoted string literal in a marker.
STRING_CHUNK = re.compile(r'([\s\w\.{}()*+#:;,/?!~`@$%^&=|<>\[\]-]+)')
def parse_marker(marker_string):
    """
    Parse a marker string and return a dictionary containing a marker expression.

    The dictionary will contain keys "op", "lhs" and "rhs" for non-terminals in
    the expression grammar, or strings. A string contained in quotes is to be
    interpreted as a literal string, and a string not contained in quotes is a
    variable (such as os_name).

    Raises SyntaxError on malformed input.
    """
    def marker_var(remaining):
        # either identifier, or literal string
        m = IDENTIFIER.match(remaining)
        if m:
            result = m.groups()[0]
            remaining = remaining[m.end():]
        elif not remaining:
            raise SyntaxError('unexpected end of input')
        else:
            q = remaining[0]
            if q not in '\'"':
                raise SyntaxError('invalid expression: %s' % remaining)
            # oq is the "other" quote character, which may appear unescaped
            # inside a literal delimited by q.
            oq = '\'"'.replace(q, '')
            remaining = remaining[1:]
            parts = [q]
            while remaining:
                # either a string chunk, or oq, or q to terminate
                if remaining[0] == q:
                    break
                elif remaining[0] == oq:
                    parts.append(oq)
                    remaining = remaining[1:]
                else:
                    m = STRING_CHUNK.match(remaining)
                    if not m:
                        raise SyntaxError('error in string literal: %s' % remaining)
                    parts.append(m.groups()[0])
                    remaining = remaining[m.end():]
            else:
                # while/else: input ran out before the closing quote was seen
                s = ''.join(parts)
                raise SyntaxError('unterminated string: %s' % s)
            parts.append(q)
            result = ''.join(parts)
            remaining = remaining[1:].lstrip() # skip past closing quote
        return result, remaining

    def marker_expr(remaining):
        # a parenthesised sub-marker, or a chain of 'var OP var' comparisons
        if remaining and remaining[0] == '(':
            result, remaining = marker(remaining[1:].lstrip())
            if remaining[0] != ')':
                raise SyntaxError('unterminated parenthesis: %s' % remaining)
            remaining = remaining[1:].lstrip()
        else:
            lhs, remaining = marker_var(remaining)
            while remaining:
                m = MARKER_OP.match(remaining)
                if not m:
                    break
                op = m.groups()[0]
                remaining = remaining[m.end():]
                rhs, remaining = marker_var(remaining)
                # left-associative: fold each comparison into the lhs
                lhs = {'op': op, 'lhs': lhs, 'rhs': rhs}
            result = lhs
        return result, remaining

    def marker_and(remaining):
        # one or more marker_exprs joined by 'and' (binds tighter than 'or')
        lhs, remaining = marker_expr(remaining)
        while remaining:
            m = AND.match(remaining)
            if not m:
                break
            remaining = remaining[m.end():]
            rhs, remaining = marker_expr(remaining)
            lhs = {'op': 'and', 'lhs': lhs, 'rhs': rhs}
        return lhs, remaining

    def marker(remaining):
        # top level: marker_ands joined by 'or'
        lhs, remaining = marker_and(remaining)
        while remaining:
            m = OR.match(remaining)
            if not m:
                break
            remaining = remaining[m.end():]
            rhs, remaining = marker_and(remaining)
            lhs = {'op': 'or', 'lhs': lhs, 'rhs': rhs}
        return lhs, remaining

    return marker(marker_string)
def parse_requirement(req):
    """
    Parse a requirement passed in as a string. Return a Container
    whose attributes contain the various parts of the requirement.

    Returns None for a blank line or a comment line.  Raises SyntaxError
    on malformed input.  The Container has attributes: name, extras,
    constraints, marker, url and requirement.
    """
    remaining = req.strip()
    if not remaining or remaining.startswith('#'):
        # blank line or comment: nothing to parse
        return None
    m = IDENTIFIER.match(remaining)
    if not m:
        raise SyntaxError('name expected: %s' % remaining)
    distname = m.groups()[0]
    remaining = remaining[m.end():]
    extras = mark_expr = versions = uri = None
    if remaining and remaining[0] == '[':
        # a comma-separated list of extras in square brackets
        i = remaining.find(']', 1)
        if i < 0:
            raise SyntaxError('unterminated extra: %s' % remaining)
        s = remaining[1:i]
        remaining = remaining[i + 1:].lstrip()
        extras = []
        while s:
            m = IDENTIFIER.match(s)
            if not m:
                raise SyntaxError('malformed extra: %s' % s)
            extras.append(m.groups()[0])
            s = s[m.end():]
            if not s:
                break
            if s[0] != ',':
                raise SyntaxError('comma expected in extras: %s' % s)
            s = s[1:].lstrip()
        if not extras:
            extras = None
    if remaining:
        if remaining[0] == '@':
            # it's a URI
            remaining = remaining[1:].lstrip()
            m = NON_SPACE.match(remaining)
            if not m:
                raise SyntaxError('invalid URI: %s' % remaining)
            uri = m.groups()[0]
            t = urlparse(uri)
            # there are issues with Python and URL parsing, so this test
            # is a bit crude. See bpo-20271, bpo-23505. Python doesn't
            # always parse invalid URLs correctly - it should raise
            # exceptions for malformed URLs
            if not (t.scheme and t.netloc):
                raise SyntaxError('Invalid URL: %s' % uri)
            remaining = remaining[m.end():].lstrip()
        else:

            def get_versions(ver_remaining):
                """
                Return a list of operator, version tuples if any are
                specified, else None.
                """
                m = COMPARE_OP.match(ver_remaining)
                versions = None
                if m:
                    versions = []
                    while True:
                        op = m.groups()[0]
                        ver_remaining = ver_remaining[m.end():]
                        m = VERSION_IDENTIFIER.match(ver_remaining)
                        if not m:
                            raise SyntaxError('invalid version: %s' % ver_remaining)
                        v = m.groups()[0]
                        versions.append((op, v))
                        ver_remaining = ver_remaining[m.end():]
                        if not ver_remaining or ver_remaining[0] != ',':
                            break
                        ver_remaining = ver_remaining[1:].lstrip()
                        # Some packages have a trailing comma which would break things
                        # See issue #148
                        if not ver_remaining:
                            break
                        m = COMPARE_OP.match(ver_remaining)
                        if not m:
                            raise SyntaxError('invalid constraint: %s' % ver_remaining)
                    if not versions:
                        versions = None
                return versions, ver_remaining

            if remaining[0] != '(':
                versions, remaining = get_versions(remaining)
            else:
                i = remaining.find(')', 1)
                if i < 0:
                    raise SyntaxError('unterminated parenthesis: %s' % remaining)
                s = remaining[1:i]
                remaining = remaining[i + 1:].lstrip()
                # As a special diversion from PEP 508, allow a version number
                # a.b.c in parentheses as a synonym for ~= a.b.c (because this
                # is allowed in earlier PEPs)
                if COMPARE_OP.match(s):
                    versions, _ = get_versions(s)
                else:
                    m = VERSION_IDENTIFIER.match(s)
                    if not m:
                        raise SyntaxError('invalid constraint: %s' % s)
                    v = m.groups()[0]
                    s = s[m.end():].lstrip()
                    if s:
                        raise SyntaxError('invalid constraint: %s' % s)
                    versions = [('~=', v)]

    if remaining:
        # an environment marker follows after ';'
        if remaining[0] != ';':
            raise SyntaxError('invalid requirement: %s' % remaining)
        remaining = remaining[1:].lstrip()
        mark_expr, remaining = parse_marker(remaining)

    if remaining and remaining[0] != '#':
        raise SyntaxError('unexpected trailing data: %s' % remaining)

    # reconstruct a normalised requirement string from name + constraints
    if not versions:
        rs = distname
    else:
        rs = '%s %s' % (distname, ', '.join(['%s %s' % con for con in versions]))
    return Container(name=distname, extras=extras, constraints=versions,
                     marker=mark_expr, url=uri, requirement=rs)
def get_resources_dests(resources_root, rules):
    """Find destinations for resources files"""

    def rel_path(root, path):
        # normalizes and returns a lstripped-/-separated path
        root = root.replace(os.path.sep, '/')
        path = path.replace(os.path.sep, '/')
        assert path.startswith(root)
        return path[len(root):].lstrip('/')

    destinations = {}
    for base, suffix, dest in rules:
        prefix = os.path.join(resources_root, base)
        for abs_base in iglob(prefix):
            pattern = os.path.join(abs_base, suffix)
            for abs_path in iglob(pattern):
                resource_file = rel_path(resources_root, abs_path)
                if dest is None:
                    # a None destination removes any earlier mapping
                    destinations.pop(resource_file, None)
                else:
                    suffix_part = rel_path(abs_base, abs_path)
                    prefix_part = dest.replace(os.path.sep, '/').rstrip('/')
                    destinations[resource_file] = prefix_part + '/' + suffix_part
    return destinations
def in_venv():
    """Return True when running inside a virtual environment."""
    if hasattr(sys, 'real_prefix'):
        # virtualenv-created environments set sys.real_prefix
        return True
    # PEP 405 venvs: sys.base_prefix differs from sys.prefix
    return sys.prefix != getattr(sys, 'base_prefix', sys.prefix)
def get_executable():
    """Return the Python interpreter path as text."""
# The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as
# changes to the stub launcher mean that sys.executable always points
# to the stub on OS X
    # Avoid normcasing: see issue #143
    executable = sys.executable
    if isinstance(executable, text_type):
        return executable
    return fsdecode(executable)
def proceed(prompt, allowed_chars, error_prompt=None, default=None):
    """
    Prompt the user repeatedly until the (lowercased) first character of
    the response is one of *allowed_chars*; return that character.  An
    empty response is replaced by *default*, if given.
    """
    current = prompt
    while True:
        answer = raw_input(current)
        current = prompt
        if not answer and default:
            answer = default
        if answer:
            c = answer[0].lower()
            if c in allowed_chars:
                return c
            if error_prompt:
                # re-ask, prefixing the rejected character and explanation
                current = '%c: %s\n%s' % (c, error_prompt, prompt)
def extract_by_key(d, keys):
    """Return a new dict containing only *keys* that are present in *d*.

    *keys* may be an iterable of keys or a whitespace-separated string.
    """
    if isinstance(keys, string_types):
        keys = keys.split()
    return dict((key, d[key]) for key in keys if key in d)
def read_exports(stream):
    """
    Read export entries from a (binary) stream and return a dict of the
    form {group: {name: ExportEntry}}.  The stream may contain either the
    JSON metadata format (extensions -> python.exports -> exports) or the
    legacy ini-style format; JSON is tried first.
    """
    if sys.version_info[0] >= 3:
        # needs to be a text stream
        stream = codecs.getreader('utf-8')(stream)
    # Try to load as JSON, falling back on legacy format
    data = stream.read()
    stream = StringIO(data)
    try:
        jdata = json.load(stream)
        result = jdata['extensions']['python.exports']['exports']
        # convert each 'name = value' pair into an ExportEntry in place
        for group, entries in result.items():
            for k, v in entries.items():
                s = '%s = %s' % (k, v)
                entry = get_export_entry(s)
                assert entry is not None
                entries[k] = entry
        return result
    except Exception:
        # not JSON (or not the expected shape): rewind and parse as ini
        stream.seek(0, 0)

    def read_stream(cp, stream):
        # prefer read_file (Python 3); fall back to the older readfp
        if hasattr(cp, 'read_file'):
            cp.read_file(stream)
        else:
            cp.readfp(stream)

    cp = configparser.ConfigParser()
    try:
        read_stream(cp, stream)
    except configparser.MissingSectionHeaderError:
        # the data may be uniformly indented (e.g. embedded in metadata):
        # dedent it and try once more
        stream.close()
        data = textwrap.dedent(data)
        stream = StringIO(data)
        read_stream(cp, stream)

    result = {}
    for key in cp.sections():
        result[key] = entries = {}
        for name, value in cp.items(key):
            s = '%s = %s' % (name, value)
            entry = get_export_entry(s)
            assert entry is not None
            #entry.dist = self
            entries[name] = entry
    return result
def write_exports(exports, stream):
    """
    Write export entries to a (binary) stream in the legacy ini-style
    format.  *exports* is a dict of the form {group: {name: ExportEntry}}.
    """
    if sys.version_info[0] >= 3:
        # ConfigParser.write produces text; wrap the binary stream
        stream = codecs.getwriter('utf-8')(stream)
    cp = configparser.ConfigParser()
    for section, entries in exports.items():
        # TODO check section, entries for valid values
        cp.add_section(section)
        for entry in entries.values():
            if entry.suffix is None:
                value = entry.prefix
            else:
                value = '%s:%s' % (entry.prefix, entry.suffix)
            if entry.flags:
                value = '%s [%s]' % (value, ', '.join(entry.flags))
            cp.set(section, entry.name, value)
    cp.write(stream)
@contextlib.contextmanager
def tempdir():
    """Context manager yielding a fresh temporary directory, deleted on exit."""
    path = tempfile.mkdtemp()
    try:
        yield path
    finally:
        # remove the directory and everything in it, even on error
        shutil.rmtree(path)
@contextlib.contextmanager
def chdir(d):
    """Context manager that runs its body with *d* as the working directory."""
    saved = os.getcwd()
    try:
        os.chdir(d)
        yield
    finally:
        # always restore the original working directory
        os.chdir(saved)
@contextlib.contextmanager
def socket_timeout(seconds=15):
    """Temporarily set the global default socket timeout to *seconds*."""
    saved = socket.getdefaulttimeout()
    try:
        socket.setdefaulttimeout(seconds)
        yield
    finally:
        # restore whatever timeout (possibly None) was in force before
        socket.setdefaulttimeout(saved)
class cached_property(object):
    """
    Non-data descriptor that computes a value on first access and then
    stores it on the instance under the wrapped function's name, so
    later accesses bypass the descriptor entirely.
    """
    def __init__(self, func):
        self.func = func

    def __get__(self, obj, cls=None):
        if obj is None:
            # accessed on the class: return the descriptor itself
            return self
        result = self.func(obj)
        # shadow this descriptor with the computed value on the instance
        object.__setattr__(obj, self.func.__name__, result)
        return result
def convert_path(pathname):
    """Return 'pathname' as a name that will work on the native filesystem.

    The path is split on '/' and put back together again using the current
    directory separator. Needed because filenames in the setup script are
    always supplied in Unix style, and have to be converted to the local
    convention before we can actually use them in the filesystem. Raises
    ValueError on non-Unix-ish systems if 'pathname' either starts or
    ends with a slash.
    """
    if os.sep == '/':
        # native separator already matches: nothing to do
        return pathname
    if not pathname:
        return pathname
    if pathname.startswith('/'):
        raise ValueError("path '%s' cannot be absolute" % pathname)
    if pathname.endswith('/'):
        raise ValueError("path '%s' cannot end with '/'" % pathname)

    # drop any '.' components before rejoining with the native separator
    components = [c for c in pathname.split('/') if c != os.curdir]
    if not components:
        return os.curdir
    return os.path.join(*components)
class FileOperator(object):
    """
    Performs filesystem operations (copying, writing, byte-compiling,
    directory creation/removal), honouring a dry-run flag and optionally
    recording what was written/created so changes can be committed or
    rolled back.
    """
    def __init__(self, dry_run=False):
        # dry_run: log intended actions but make no on-disk changes
        self.dry_run = dry_run
        # directories already confirmed/created by ensure_dir
        self.ensured = set()
        self._init_record()

    def _init_record(self):
        # reset change-tracking; recording is off until a caller enables it
        self.record = False
        self.files_written = set()
        self.dirs_created = set()

    def record_as_written(self, path):
        # note a written file, but only while recording is enabled
        if self.record:
            self.files_written.add(path)

    def newer(self, source, target):
        """Tell if the target is newer than the source.

        Returns true if 'source' exists and is more recently modified than
        'target', or if 'source' exists and 'target' doesn't.

        Returns false if both exist and 'target' is the same age or younger
        than 'source'. Raise PackagingFileError if 'source' does not exist.

        Note that this test is not very accurate: files created in the same
        second will have the same "age".
        """
        if not os.path.exists(source):
            raise DistlibException("file '%r' does not exist" %
                                   os.path.abspath(source))
        if not os.path.exists(target):
            return True

        return os.stat(source).st_mtime > os.stat(target).st_mtime

    def copy_file(self, infile, outfile, check=True):
        """Copy a file respecting dry-run and force flags.

        With check=True, refuse to overwrite a symlink or a non-regular
        file at the destination.
        """
        self.ensure_dir(os.path.dirname(outfile))
        logger.info('Copying %s to %s', infile, outfile)
        if not self.dry_run:
            msg = None
            if check:
                if os.path.islink(outfile):
                    msg = '%s is a symlink' % outfile
                elif os.path.exists(outfile) and not os.path.isfile(outfile):
                    msg = '%s is a non-regular file' % outfile
                if msg:
                    raise ValueError(msg + ' which would be overwritten')
            shutil.copyfile(infile, outfile)
        self.record_as_written(outfile)

    def copy_stream(self, instream, outfile, encoding=None):
        # Copy the contents of an open stream to a file; with an encoding,
        # the output is opened in text mode via codecs.
        assert not os.path.isdir(outfile)
        self.ensure_dir(os.path.dirname(outfile))
        logger.info('Copying stream %s to %s', instream, outfile)
        if not self.dry_run:
            if encoding is None:
                outstream = open(outfile, 'wb')
            else:
                outstream = codecs.open(outfile, 'w', encoding=encoding)
            try:
                shutil.copyfileobj(instream, outstream)
            finally:
                outstream.close()
        self.record_as_written(outfile)

    def write_binary_file(self, path, data):
        # Write bytes to *path*, replacing any existing file.
        self.ensure_dir(os.path.dirname(path))
        if not self.dry_run:
            if os.path.exists(path):
                os.remove(path)
            with open(path, 'wb') as f:
                f.write(data)
        self.record_as_written(path)

    def write_text_file(self, path, data, encoding):
        # Encode *data* with *encoding* and delegate to write_binary_file.
        self.write_binary_file(path, data.encode(encoding))

    def set_mode(self, bits, mask, files):
        # Only meaningful on POSIX-like systems (including Jython on posix).
        if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'):
            # Set the executable bits (owner, group, and world) on
            # all the files specified.
            for f in files:
                if self.dry_run:
                    logger.info("changing mode of %s", f)
                else:
                    # OR in the requested bits, then clamp with the mask
                    mode = (os.stat(f).st_mode | bits) & mask
                    logger.info("changing mode of %s to %o", f, mode)
                    os.chmod(f, mode)

    # convenience: mark files r-xr-xr-x (within the 0o7777 permission mask)
    set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f)

    def ensure_dir(self, path):
        # Recursively create *path* (parents first), remembering what was
        # made so repeated calls are cheap and rollback knows what to remove.
        path = os.path.abspath(path)
        if path not in self.ensured and not os.path.exists(path):
            self.ensured.add(path)
            d, f = os.path.split(path)
            self.ensure_dir(d)
            logger.info('Creating %s' % path)
            if not self.dry_run:
                os.mkdir(path)
            if self.record:
                self.dirs_created.add(path)

    def byte_compile(self, path, optimize=False, force=False, prefix=None, hashed_invalidation=False):
        # Byte-compile *path* to its cache location; returns the .pyc path.
        # *prefix*, when given, is stripped from the path used in error
        # diagnostics.  *hashed_invalidation* opts into PEP 552 checked-hash
        # pycs where py_compile supports it.
        dpath = cache_from_source(path, not optimize)
        logger.info('Byte-compiling %s to %s', path, dpath)
        if not self.dry_run:
            if force or self.newer(path, dpath):
                if not prefix:
                    diagpath = None
                else:
                    assert path.startswith(prefix)
                    diagpath = path[len(prefix):]
            compile_kwargs = {}
            if hashed_invalidation and hasattr(py_compile, 'PycInvalidationMode'):
                compile_kwargs['invalidation_mode'] = py_compile.PycInvalidationMode.CHECKED_HASH
            py_compile.compile(path, dpath, diagpath, True, **compile_kwargs)     # raise error
        self.record_as_written(dpath)
        return dpath

    def ensure_removed(self, path):
        # Remove a file, link or directory tree if it exists, updating the
        # recorded sets so commit/rollback stay accurate.
        if os.path.exists(path):
            if os.path.isdir(path) and not os.path.islink(path):
                logger.debug('Removing directory tree at %s', path)
                if not self.dry_run:
                    shutil.rmtree(path)
                if self.record:
                    if path in self.dirs_created:
                        self.dirs_created.remove(path)
            else:
                if os.path.islink(path):
                    s = 'link'
                else:
                    s = 'file'
                logger.debug('Removing %s %s', s, path)
                if not self.dry_run:
                    os.remove(path)
                if self.record:
                    if path in self.files_written:
                        self.files_written.remove(path)

    def is_writable(self, path):
        # Walk up from *path* to the first existing ancestor and report
        # whether that ancestor is writable.
        result = False
        while not result:
            if os.path.exists(path):
                result = os.access(path, os.W_OK)
                break
            parent = os.path.dirname(path)
            if parent == path:
                break
            path = parent
        return result

    def commit(self):
        """
        Commit recorded changes, turn off recording, return
        changes.
        """
        assert self.record
        result = self.files_written, self.dirs_created
        self._init_record()
        return result

    def rollback(self):
        # Undo recorded changes: delete written files, then remove created
        # directories (deepest first).
        if not self.dry_run:
            for f in list(self.files_written):
                if os.path.exists(f):
                    os.remove(f)
            # dirs should all be empty now, except perhaps for
            # __pycache__ subdirs
            # reverse so that subdirs appear before their parents
            dirs = sorted(self.dirs_created, reverse=True)
            for d in dirs:
                flist = os.listdir(d)
                if flist:
                    assert flist == ['__pycache__']
                    sd = os.path.join(d, flist[0])
                    os.rmdir(sd)
                os.rmdir(d)     # should fail if non-empty
        self._init_record()
def resolve(module_name, dotted_path):
    """
    Resolve *dotted_path* (attribute path, or None for the module itself)
    within the module named *module_name*, importing it if necessary.

    :param module_name: possibly dotted module name, e.g. 'logging.handlers'.
    :param dotted_path: attribute path within the module, or None.
    :raises ImportError: if the module cannot be imported.
    :raises AttributeError: if an attribute in the path does not exist.
    """
    if module_name in sys.modules:
        mod = sys.modules[module_name]
    else:
        # Fix: bare __import__('a.b') returns the top-level package 'a',
        # so attribute resolution broke for dotted module names that were
        # not already imported.  import_module returns the named submodule.
        import importlib
        mod = importlib.import_module(module_name)
    if dotted_path is None:
        result = mod
    else:
        parts = dotted_path.split('.')
        # walk the attribute chain from the module
        result = getattr(mod, parts.pop(0))
        for p in parts:
            result = getattr(result, p)
    return result
class ExportEntry(object):
    """
    A single export entry: *name* maps to *prefix*:*suffix*, with an
    optional list of *flags*.
    """
    def __init__(self, name, prefix, suffix, flags):
        self.name = name
        self.prefix = prefix
        self.suffix = suffix
        self.flags = flags

    @cached_property
    def value(self):
        # resolved lazily; cached_property stores the result on the instance
        return resolve(self.prefix, self.suffix)

    def __repr__(self):  # pragma: no cover
        return '<ExportEntry %s = %s:%s %s>' % (self.name, self.prefix,
                                                self.suffix, self.flags)

    def __eq__(self, other):
        if not isinstance(other, ExportEntry):
            return False
        return (self.name == other.name and
                self.prefix == other.prefix and
                self.suffix == other.suffix and
                self.flags == other.flags)

    __hash__ = object.__hash__
# 'name = prefix[:suffix] [flag, flag=value, ...]'
ENTRY_RE = re.compile(r'''(?P<name>(\w|[-.+])+)
                      \s*=\s*(?P<callable>(\w+)([:\.]\w+)*)
                      \s*(\[\s*(?P<flags>[\w-]+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])?
                      ''', re.VERBOSE)

def get_export_entry(specification):
    """
    Parse an export specification of the form
    'name = prefix[:suffix] [flags]' and return an ExportEntry, or None
    if the specification doesn't match at all.

    :raises DistlibException: for malformed specifications.
    """
    m = ENTRY_RE.search(specification)
    if not m:
        # stray brackets with no match indicate a malformed flags section
        if '[' in specification or ']' in specification:
            raise DistlibException("Invalid specification "
                                   "'%s'" % specification)
        return None
    d = m.groupdict()
    path = d['callable']
    colons = path.count(':')
    if colons == 0:
        prefix, suffix = path, None
    elif colons == 1:
        prefix, suffix = path.split(':')
    else:
        # more than one ':' is never valid
        raise DistlibException("Invalid specification "
                               "'%s'" % specification)
    flags = d['flags']
    if flags is None:
        if '[' in specification or ']' in specification:
            raise DistlibException("Invalid specification "
                                   "'%s'" % specification)
        flags = []
    else:
        flags = [f.strip() for f in flags.split(',')]
    return ExportEntry(d['name'], prefix, suffix, flags)
def get_cache_base(suffix=None):
    """
    Return the default base location for distlib caches. If the directory does
    not exist, it is created. Use the suffix provided for the base directory,
    and default to '.distlib' if it isn't provided.

    On Windows, if LOCALAPPDATA is defined in the environment, then it is
    assumed to be a directory, and will be the parent directory of the result.
    On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home
    directory - using os.expanduser('~') - will be the parent directory of
    the result.

    The result is just the directory '.distlib' in the parent directory as
    determined above, or with the name specified with ``suffix``.
    """
    if suffix is None:
        suffix = '.distlib'
    if os.name == 'nt' and 'LOCALAPPDATA' in os.environ:
        parent = os.path.expandvars('$localappdata')
    else:
        # Assume posix, or old Windows
        parent = os.path.expanduser('~')
    usable = False
    # we use 'isdir' instead of 'exists', because we want to
    # fail if there's a file with that name
    if os.path.isdir(parent):
        usable = os.access(parent, os.W_OK)
        if not usable:
            logger.warning('Directory exists but is not writable: %s', parent)
    else:
        try:
            os.makedirs(parent)
            usable = True
        except OSError:
            logger.warning('Unable to create %s', parent, exc_info=True)
    if not usable:
        # fall back to a throwaway temp dir rather than failing outright
        parent = tempfile.mkdtemp()
        logger.warning('Default location unusable, using %s', parent)
    return os.path.join(parent, suffix)
def path_to_cache_dir(path):
    """
    Convert an absolute path to a directory name for use in a cache.

    The algorithm used is:

    #. On Windows, any ``':'`` in the drive is replaced with ``'---'``.
    #. Any occurrence of ``os.sep`` is replaced with ``'--'``.
    #. ``'.cache'`` is appended.
    """
    drive, rest = os.path.splitdrive(os.path.abspath(path))
    if drive:
        # ':' is not a valid character in directory names
        drive = drive.replace(':', '---')
    return drive + rest.replace(os.sep, '--') + '.cache'
def ensure_slash(s):
    """Return *s* with a trailing '/' appended if it doesn't already have one."""
    return s if s.endswith('/') else s + '/'
def parse_credentials(netloc):
    """
    Split a network location of the form '[user[:password]@]host' and
    return (username, password, host), with the credentials URL-unquoted.
    Missing parts are returned as None.
    """
    username = password = None
    if '@' in netloc:
        # split on the last '@' so passwords may contain '@'
        userinfo, netloc = netloc.rsplit('@', 1)
        if ':' in userinfo:
            username, password = userinfo.split(':', 1)
        else:
            username = userinfo
        if username:
            username = unquote(username)
        if password:
            password = unquote(password)
    return username, password, netloc
def get_process_umask():
    """Return the current process umask without permanently changing it."""
    # os.umask has no read-only form: set a dummy value, then restore.
    current = os.umask(0o22)
    os.umask(current)
    return current
def is_string_sequence(seq):
    """
    Return True if every element of *seq* is a string, else False.

    An empty sequence returns True (vacuously).  The previous
    implementation used ``assert i is not None``, which crashed with
    AssertionError on empty input and silently did nothing under ``-O``.
    """
    return all(isinstance(item, string_types) for item in seq)
# 'name-version', where name may itself contain '-' followed by a letter
PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-'
                                      '([a-z0-9_.+-]+)', re.I)
# trailing '-pyX[.Y]' marker
PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)')

def split_filename(filename, project_name=None):
    """
    Extract name, version, python version from a filename (no extension)

    Return name, version, pyver or None
    """
    pyver = None
    filename = unquote(filename).replace(' ', '-')
    m = PYTHON_VERSION.search(filename)
    if m:
        # strip the -pyX.Y suffix, remembering the Python version
        pyver = m.group(1)
        filename = filename[:m.start()]
    if project_name and len(filename) > len(project_name) + 1:
        # prefer an exact match on the known project name
        m = re.match(re.escape(project_name) + r'\b', filename)
        if m:
            split_at = m.end()
            return filename[:split_at], filename[split_at + 1:], pyver
    m = PROJECT_NAME_AND_VERSION.match(filename)
    if m:
        return m.group(1), m.group(3), pyver
    return None
# Allow spaces in name because of legacy dists like "Twisted Core"
NAME_VERSION_RE = re.compile(r'(?P<name>[\w .-]+)\s*'
                             r'\(\s*(?P<ver>[^\s)]+)\)$')

def parse_name_and_version(p):
    """
    A utility method used to get name and version from a string.

    From e.g. a Provides-Dist value.

    :param p: A value in a form 'foo (1.0)'
    :return: The name and version as a tuple.
    """
    m = NAME_VERSION_RE.match(p)
    if not m:
        raise DistlibException('Ill-formed name/version string: \'%s\'' % p)
    groups = m.groupdict()
    # names are normalised: stripped of surrounding space and lowercased
    return groups['name'].strip().lower(), groups['ver']
def get_extras(requested, available):
    """
    Resolve a requested set of extras against those available.

    '*' expands to all available extras; a '-foo' entry removes 'foo'
    from the result; undeclared extras are warned about but still added.
    Returns a set of extra names.
    """
    result = set()
    requested = set(requested or [])
    available = set(available or [])
    if '*' in requested:
        # wildcard: start from everything that is available
        requested.remove('*')
        result |= available
    for req in requested:
        if req == '-':
            # a bare '-' is kept literally
            result.add(req)
        elif req.startswith('-'):
            unwanted = req[1:]
            if unwanted not in available:
                logger.warning('undeclared extra: %s' % unwanted)
            result.discard(unwanted)
        else:
            if req not in available:
                logger.warning('undeclared extra: %s' % req)
            result.add(req)
    return result
909#
910# Extended metadata functionality
911#
def _get_external_data(url):
    """Fetch *url* and decode it as JSON; return {} on any failure."""
    try:
        # urlopen might fail if it runs into redirections,
        # because of Python issue #13696. Fixed in locators
        # using a custom redirect handler.
        response = urlopen(url)
        content_type = response.info().get('Content-Type')
        if content_type.startswith('application/json'):
            return json.load(codecs.getreader('utf-8')(response))
        logger.debug('Unexpected response for JSON request: %s', content_type)
    except Exception as e:
        logger.exception('Failed to get external data for %s: %s', url, e)
    return {}
_external_data_base_url = 'https://www.red-dove.com/pypi/projects/'

def get_project_data(name):
    """Fetch project-level JSON metadata for *name* from the external index."""
    relative = '%s/%s/project.json' % (name[0].upper(), name)
    return _get_external_data(urljoin(_external_data_base_url, relative))
def get_package_data(name, version):
    """Fetch version-specific JSON metadata for *name* from the external index."""
    relative = '%s/%s/package-%s.json' % (name[0].upper(), name, version)
    return _get_external_data(urljoin(_external_data_base_url, relative))
class Cache(object):
    """
    A class implementing a cache for resources that need to live in the file system
    e.g. shared libraries. This class was moved from resources to here because it
    could be used by other modules, e.g. the wheel module.
    """

    def __init__(self, base):
        """
        Initialise an instance.

        :param base: The base directory where the cache should be located.
        """
        # we use 'isdir' instead of 'exists', because we want to
        # fail if there's a file with that name
        if not os.path.isdir(base):  # pragma: no cover
            os.makedirs(base)
        mode = os.stat(base).st_mode
        if mode & 0o77:
            # group/world bits are set: the cache contents are not private
            logger.warning('Directory \'%s\' is not private', base)
        self.base = os.path.abspath(os.path.normpath(base))

    def prefix_to_dir(self, prefix):
        """
        Converts a resource prefix to a directory name in the cache.
        """
        return path_to_cache_dir(prefix)

    def clear(self):
        """
        Clear the cache.

        Returns the list of entries that could not be removed.
        """
        failures = []
        for name in os.listdir(self.base):
            full = os.path.join(self.base, name)
            try:
                if os.path.islink(full) or os.path.isfile(full):
                    os.remove(full)
                elif os.path.isdir(full):
                    shutil.rmtree(full)
            except Exception:
                failures.append(full)
        return failures
class EventMixin(object):
    """
    A very simple publish/subscribe system.
    """
    def __init__(self):
        # event name -> deque of subscriber callables
        self._subscribers = {}

    def add(self, event, subscriber, append=True):
        """
        Add a subscriber for an event.

        :param event: The name of an event.
        :param subscriber: The subscriber to be added (and called when the
                           event is published).
        :param append: Whether to append or prepend the subscriber to an
                       existing subscriber list for the event.
        """
        queue = self._subscribers.get(event)
        if queue is None:
            self._subscribers[event] = deque([subscriber])
        elif append:
            queue.append(subscriber)
        else:
            queue.appendleft(subscriber)

    def remove(self, event, subscriber):
        """
        Remove a subscriber for an event.

        :param event: The name of an event.
        :param subscriber: The subscriber to be removed.
        """
        if event not in self._subscribers:
            raise ValueError('No subscribers: %r' % event)
        self._subscribers[event].remove(subscriber)

    def get_subscribers(self, event):
        """
        Return an iterator for the subscribers for an event.
        :param event: The event to return subscribers for.
        """
        return iter(self._subscribers.get(event, ()))

    def publish(self, event, *args, **kwargs):
        """
        Publish a event and return a list of values returned by its
        subscribers.

        :param event: The event to publish.
        :param args: The positional arguments to pass to the event's
                     subscribers.
        :param kwargs: The keyword arguments to pass to the event's
                       subscribers.
        """
        results = []
        for subscriber in self.get_subscribers(event):
            try:
                outcome = subscriber(event, *args, **kwargs)
            except Exception:
                # a failing subscriber contributes None, never breaks others
                logger.exception('Exception during event publication')
                outcome = None
            results.append(outcome)
        logger.debug('publish %s: args = %s, kwargs = %s, result = %s',
                     event, args, kwargs, results)
        return results
1060#
1061# Simple sequencing
1062#
1063class Sequencer(object):
    def __init__(self):
        # succ -> set of predecessors, pred -> set of successors
        self._preds = {}
        self._succs = {}
        self._nodes = set()     # nodes with no preds/succs

    def add_node(self, node):
        # register an isolated node (no edges)
        self._nodes.add(node)

    def remove_node(self, node, edges=False):
        # remove a node; with edges=True, also drop every edge touching it
        if node in self._nodes:
            self._nodes.remove(node)
        if edges:
            for p in set(self._preds.get(node, ())):
                self.remove(p, node)
            for s in set(self._succs.get(node, ())):
                self.remove(node, s)
            # Remove empties
            for k, v in list(self._preds.items()):
                if not v:
                    del self._preds[k]
            for k, v in list(self._succs.items()):
                if not v:
                    del self._succs[k]
    def add(self, pred, succ):
        # record the dependency edge pred -> succ (self-edges disallowed)
        assert pred != succ
        self._preds.setdefault(succ, set()).add(pred)
        self._succs.setdefault(pred, set()).add(succ)

    def remove(self, pred, succ):
        # remove the edge pred -> succ; ValueError if it was never added
        assert pred != succ
        try:
            preds = self._preds[succ]
            succs = self._succs[pred]
        except KeyError:  # pragma: no cover
            raise ValueError('%r not a successor of anything' % succ)
        try:
            preds.remove(pred)
            succs.remove(succ)
        except KeyError:  # pragma: no cover
            raise ValueError('%r not a successor of %r' % (succ, pred))

    def is_step(self, step):
        # a known step has at least one edge, or was added explicitly
        return (step in self._preds or step in self._succs or
                step in self._nodes)
1110 def get_steps(self, final):
1111 if not self.is_step(final):
1112 raise ValueError('Unknown: %r' % final)
1113 result = []
1114 todo = []
1115 seen = set()
1116 todo.append(final)
1117 while todo:
1118 step = todo.pop(0)
1119 if step in seen:
1120 # if a step was already seen,
1121 # move it to the end (so it will appear earlier
1122 # when reversed on return) ... but not for the
1123 # final step, as that would be confusing for
1124 # users
1125 if step != final:
1126 result.remove(step)
1127 result.append(step)
1128 else:
1129 seen.add(step)
1130 result.append(step)
1131 preds = self._preds.get(step, ())
1132 todo.extend(preds)
1133 return reversed(result)
1135 @property
1136 def strong_connections(self):
1137 #http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm
1138 index_counter = [0]
1139 stack = []
1140 lowlinks = {}
1141 index = {}
1142 result = []
1144 graph = self._succs
1146 def strongconnect(node):
1147 # set the depth index for this node to the smallest unused index
1148 index[node] = index_counter[0]
1149 lowlinks[node] = index_counter[0]
1150 index_counter[0] += 1
1151 stack.append(node)
1153 # Consider successors
1154 try:
1155 successors = graph[node]
1156 except Exception:
1157 successors = []
1158 for successor in successors:
1159 if successor not in lowlinks:
1160 # Successor has not yet been visited
1161 strongconnect(successor)
1162 lowlinks[node] = min(lowlinks[node],lowlinks[successor])
1163 elif successor in stack:
1164 # the successor is in the stack and hence in the current
1165 # strongly connected component (SCC)
1166 lowlinks[node] = min(lowlinks[node],index[successor])
1168 # If `node` is a root node, pop the stack and generate an SCC
1169 if lowlinks[node] == index[node]:
1170 connected_component = []
1172 while True:
1173 successor = stack.pop()
1174 connected_component.append(successor)
1175 if successor == node: break
1176 component = tuple(connected_component)
1177 # storing the result
1178 result.append(component)
1180 for node in graph:
1181 if node not in lowlinks:
1182 strongconnect(node)
1184 return result
1186 @property
1187 def dot(self):
1188 result = ['digraph G {']
1189 for succ in self._preds:
1190 preds = self._preds[succ]
1191 for pred in preds:
1192 result.append(' %s -> %s;' % (pred, succ))
1193 for node in self._nodes:
1194 result.append(' %s;' % node)
1195 result.append('}')
1196 return '\n'.join(result)
1198#
1199# Unarchiving functionality for zip, tar, tgz, tbz, whl
1200#
# Archive suffixes recognised by unarchive() when no explicit format is given.
ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip',
                      '.tgz', '.tbz', '.whl')
def unarchive(archive_filename, dest_dir, format=None, check=True):
    """
    Unpack an archive into a destination directory.

    :param archive_filename: Path to a .zip/.whl/.tar/.tar.gz/.tgz/
                             .tar.bz2/.tbz archive.
    :param dest_dir: Directory to extract into.
    :param format: One of 'zip', 'tgz', 'tbz', 'tar'; inferred from the
                   filename suffix when None.
    :param check: When true, reject member names that would extract
                  outside ``dest_dir`` (path traversal).
    :raises ValueError: for an unknown format or an unsafe member path.
    """
    def check_path(path):
        # Guard against path traversal ('../..') in archive member names.
        if not isinstance(path, text_type):
            path = path.decode('utf-8')
        p = os.path.abspath(os.path.join(dest_dir, path))
        # A member must resolve to dest_dir itself or strictly inside it.
        # Comparing against dest_dir + os.sep both avoids false positives
        # for sibling prefixes (/tmp/foo vs /tmp/foobar) and avoids the
        # IndexError the previous p[plen] check hit when p == dest_dir.
        if p != dest_dir and not p.startswith(dest_dir + os.sep):
            raise ValueError('path outside destination: %r' % p)

    dest_dir = os.path.abspath(dest_dir)
    archive = None
    mode = None
    if format is None:
        if archive_filename.endswith(('.zip', '.whl')):
            format = 'zip'
        elif archive_filename.endswith(('.tar.gz', '.tgz')):
            format = 'tgz'
        elif archive_filename.endswith(('.tar.bz2', '.tbz')):
            format = 'tbz'
        elif archive_filename.endswith('.tar'):
            format = 'tar'
        else: # pragma: no cover
            raise ValueError('Unknown format for %r' % archive_filename)
    # Derive the tarfile mode from the format itself, so that an
    # explicitly passed format= also works (previously 'mode' was only
    # assigned when the format was inferred from the filename, causing
    # an UnboundLocalError for explicit tar formats).
    if format == 'tgz':
        mode = 'r:gz'
    elif format == 'tbz':
        mode = 'r:bz2'
    elif format == 'tar':
        mode = 'r'
    try:
        if format == 'zip':
            archive = ZipFile(archive_filename, 'r')
            if check:
                names = archive.namelist()
                for name in names:
                    check_path(name)
        else:
            archive = tarfile.open(archive_filename, mode)
            if check:
                names = archive.getnames()
                for name in names:
                    check_path(name)
        if format != 'zip' and sys.version_info[0] < 3:
            # See Python issue 17153. If the dest path contains Unicode,
            # tarfile extraction fails on Python 2.x if a member path name
            # contains non-ASCII characters - it leads to an implicit
            # bytes -> unicode conversion using ASCII to decode.
            for tarinfo in archive.getmembers():
                if not isinstance(tarinfo.name, text_type):
                    tarinfo.name = tarinfo.name.decode('utf-8')
        archive.extractall(dest_dir)
    finally:
        if archive:
            archive.close()
def zip_dir(directory):
    """zip a directory tree into a BytesIO object"""
    buf = io.BytesIO()
    prefix_len = len(directory)
    with ZipFile(buf, "w") as zf:
        for base, _dirs, filenames in os.walk(directory):
            # Archive names are the paths relative to ``directory``.
            arc_base = base[prefix_len:]
            for fname in filenames:
                zf.write(os.path.join(base, fname),
                         os.path.join(arc_base, fname))
    return buf
1272#
1273# Simple progress bar
1274#
1276UNITS = ('', 'K', 'M', 'G','T','P')
class Progress(object):
    """
    Track progress of an operation between a minimum and (possibly
    unknown) maximum value, and render percentage, ETA and speed.
    """
    # Placeholder shown when the maximum is unknown (maxval=None).
    unknown = 'UNKNOWN'

    def __init__(self, minval=0, maxval=100):
        # maxval=None means "total not known in advance".
        assert maxval is None or maxval >= minval
        self.min = self.cur = minval
        self.max = maxval
        self.started = None
        self.elapsed = 0
        self.done = False

    def update(self, curval):
        """Set the current value and update the elapsed-time accounting."""
        assert self.min <= curval
        assert self.max is None or curval <= self.max
        self.cur = curval
        now = time.time()
        # The first update starts the clock; later ones accumulate elapsed.
        if self.started is None:
            self.started = now
        else:
            self.elapsed = now - self.started

    def increment(self, incr):
        """Advance the current value by a non-negative increment."""
        assert incr >= 0
        self.update(self.cur + incr)

    def start(self):
        """Start (or restart) the progress at the minimum value."""
        self.update(self.min)
        return self

    def stop(self):
        """Mark the operation as finished."""
        if self.max is not None:
            self.update(self.max)
        self.done = True

    @property
    def maximum(self):
        # 'UNKNOWN' when no maximum was given.
        return self.unknown if self.max is None else self.max

    @property
    def percentage(self):
        """Return the progress as a fixed-width percentage string."""
        if self.done:
            result = '100 %'
        elif self.max is None:
            result = ' ?? %'
        else:
            v = 100.0 * (self.cur - self.min) / (self.max - self.min)
            result = '%3d %%' % v
        return result

    def format_duration(self, duration):
        """Format a duration in seconds as HH:MM:SS, or ??:??:?? if unknown."""
        # NOTE: 'and' binds tighter than 'or' here, so this reads as
        # (duration <= 0 and self.max is None) or self.cur == self.min.
        if (duration <= 0) and self.max is None or self.cur == self.min:
            result = '??:??:??'
        #elif duration < 1:
        #    result = '--:--:--'
        else:
            result = time.strftime('%H:%M:%S', time.gmtime(duration))
        return result

    @property
    def ETA(self):
        """Return 'Done: HH:MM:SS' when finished, else an 'ETA' estimate."""
        if self.done:
            prefix = 'Done'
            t = self.elapsed
            #import pdb; pdb.set_trace()
        else:
            prefix = 'ETA '
            if self.max is None:
                t = -1
            elif self.elapsed == 0 or (self.cur == self.min):
                t = 0
            else:
                #import pdb; pdb.set_trace()
                # Linear extrapolation: remaining = (total/done - 1) * elapsed.
                t = float(self.max - self.min)
                t /= self.cur - self.min
                t = (t - 1) * self.elapsed
        return '%s: %s' % (prefix, self.format_duration(t))

    @property
    def speed(self):
        """Return the average throughput, scaled with metric suffixes."""
        if self.elapsed == 0:
            result = 0.0
        else:
            result = (self.cur - self.min) / self.elapsed
        for unit in UNITS:
            if result < 1000:
                break
            result /= 1000.0
        return '%d %sB/s' % (result, unit)
1368#
1369# Glob functionality
1370#
# A {a,b,c} alternation set inside a rich glob pattern.
RICH_GLOB = re.compile(r'\{([^}]*)\}')
# Detects a '**' that is not standing alone as a path component.
_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]')
# Detects an unbalanced '{' or '}' in a pattern.
_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$')
def iglob(path_glob):
    """Extended globbing function that supports ** and {opt1,opt2,opt3}."""
    # Validate the pattern before delegating to the recursive worker.
    checks = (
        (_CHECK_RECURSIVE_GLOB,
         """invalid glob %r: recursive glob "**" must be used alone"""),
        (_CHECK_MISMATCH_SET,
         """invalid glob %r: mismatching set marker '{' or '}'"""),
    )
    for pattern, msg in checks:
        if pattern.search(path_glob):
            raise ValueError(msg % path_glob)
    return _iglob(path_glob)
def _iglob(path_glob):
    """
    Recursive worker for iglob(): expands one {a,b,c} alternation set (if
    present) and handles '**' by walking the tree; plain patterns are
    delegated to glob.iglob.
    """
    rich_path_glob = RICH_GLOB.split(path_glob, 1)
    if len(rich_path_glob) > 1:
        # RICH_GLOB has one capture group, so a single split yields
        # exactly (prefix, set contents, suffix).
        assert len(rich_path_glob) == 3, rich_path_glob
        # 'items' rather than 'set': don't shadow the builtin set().
        prefix, items, suffix = rich_path_glob
        for item in items.split(','):
            for path in _iglob(''.join((prefix, item, suffix))):
                yield path
    else:
        if '**' not in path_glob:
            for item in std_iglob(path_glob):
                yield item
        else:
            prefix, radical = path_glob.split('**', 1)
            if prefix == '':
                prefix = '.'
            if radical == '':
                radical = '*'
            else:
                # we support both '/'- and '\'-separated radicals
                radical = radical.lstrip('/')
                radical = radical.lstrip('\\')
            # 'dirs' rather than 'dir': don't shadow the builtin dir().
            for path, dirs, files in os.walk(prefix):
                path = os.path.normpath(path)
                for fn in _iglob(os.path.join(path, radical)):
                    yield fn
if ssl:
    from .compat import (HTTPSHandler as BaseHTTPSHandler, match_hostname,
                         CertificateError)


#
# HTTPSConnection which verifies certificates/matches domains
#

    class HTTPSConnection(httplib.HTTPSConnection):
        # Path to a PEM file of CA certificates; when set, the server
        # certificate is verified against it.
        ca_certs = None # set this to the path to the certs file (.pem)
        # Whether to match the certificate against the host name;
        # only used if ca_certs is not None.
        check_domain = True # only used if ca_certs is not None

        # noinspection PyPropertyAccess
        def connect(self):
            """Connect, wrap the socket in TLS and optionally verify the peer."""
            sock = socket.create_connection((self.host, self.port), self.timeout)
            if getattr(self, '_tunnel_host', False):
                self.sock = sock
                self._tunnel()

            # PROTOCOL_SSLv23 negotiates the protocol; SSLv2 is explicitly
            # disabled below where the ssl module supports the option.
            context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
            if hasattr(ssl, 'OP_NO_SSLv2'):
                context.options |= ssl.OP_NO_SSLv2
            if self.cert_file:
                context.load_cert_chain(self.cert_file, self.key_file)
            kwargs = {}
            if self.ca_certs:
                context.verify_mode = ssl.CERT_REQUIRED
                context.load_verify_locations(cafile=self.ca_certs)
                if getattr(ssl, 'HAS_SNI', False):
                    # Send the server name for SNI where supported.
                    kwargs['server_hostname'] = self.host

            self.sock = context.wrap_socket(sock, **kwargs)
            if self.ca_certs and self.check_domain:
                try:
                    match_hostname(self.sock.getpeercert(), self.host)
                    logger.debug('Host verified: %s', self.host)
                except CertificateError: # pragma: no cover
                    # Tear the connection down before propagating the error.
                    self.sock.shutdown(socket.SHUT_RDWR)
                    self.sock.close()
                    raise

    class HTTPSHandler(BaseHTTPSHandler):
        """urllib handler that opens HTTPS connections with verification."""
        def __init__(self, ca_certs, check_domain=True):
            BaseHTTPSHandler.__init__(self)
            self.ca_certs = ca_certs
            self.check_domain = check_domain

        def _conn_maker(self, *args, **kwargs):
            """
            This is called to create a connection instance. Normally you'd
            pass a connection class to do_open, but it doesn't actually check for
            a class, and just expects a callable. As long as we behave just as a
            constructor would have, we should be OK. If it ever changes so that
            we *must* pass a class, we'll create an UnsafeHTTPSConnection class
            which just sets check_domain to False in the class definition, and
            choose which one to pass to do_open.
            """
            result = HTTPSConnection(*args, **kwargs)
            if self.ca_certs:
                result.ca_certs = self.ca_certs
                result.check_domain = self.check_domain
            return result

        def https_open(self, req):
            """Open an HTTPS request, translating verification failures."""
            try:
                return self.do_open(self._conn_maker, req)
            except URLError as e:
                if 'certificate verify failed' in str(e.reason):
                    raise CertificateError('Unable to verify server certificate '
                                           'for %s' % req.host)
                else:
                    raise

    #
    # To prevent against mixing HTTP traffic with HTTPS (examples: A Man-In-The-
    # Middle proxy using HTTP listens on port 443, or an index mistakenly serves
    # HTML containing a http://xyz link when it should be https://xyz),
    # you can use the following handler class, which does not allow HTTP traffic.
    #
    # It works by inheriting from HTTPHandler - so build_opener won't add a
    # handler for HTTP itself.
    #
    class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler):
        def http_open(self, req):
            raise URLError('Unexpected HTTP request on what should be a secure '
                           'connection: %s' % req)
1503#
1504# XML-RPC with timeouts
1505#
class Transport(xmlrpclib.Transport):
    """
    xmlrpclib Transport which applies a connection timeout.

    :param timeout: Timeout in seconds for the underlying HTTP connection.
    :param use_datetime: Passed through to xmlrpclib.Transport.
    """
    def __init__(self, timeout, use_datetime=0):
        self.timeout = timeout
        xmlrpclib.Transport.__init__(self, use_datetime)

    def make_connection(self, host):
        """
        Return a (possibly cached) HTTPConnection for *host*, configured
        with this transport's timeout.
        """
        h, eh, x509 = self.get_host_info(host)
        if not self._connection or host != self._connection[0]:
            self._extra_headers = eh
            # Previously the stored timeout was never applied to the
            # connection; pass it through so it actually takes effect,
            # consistent with SafeTransport which forwards the timeout.
            self._connection = host, httplib.HTTPConnection(
                h, timeout=self.timeout)
        return self._connection[1]
if ssl:
    class SafeTransport(xmlrpclib.SafeTransport):
        """
        xmlrpclib SafeTransport (HTTPS) which applies a connection timeout.
        """
        def __init__(self, timeout, use_datetime=0):
            self.timeout = timeout
            xmlrpclib.SafeTransport.__init__(self, use_datetime)

        def make_connection(self, host):
            """
            Return a (possibly cached) HTTPSConnection for *host*, with the
            transport's timeout folded into the connection kwargs.
            """
            h, eh, kwargs = self.get_host_info(host)
            if not kwargs:
                kwargs = {}
            kwargs['timeout'] = self.timeout
            cached = self._connection
            if not cached or host != cached[0]:
                self._extra_headers = eh
                conn = httplib.HTTPSConnection(h, None, **kwargs)
                self._connection = host, conn
            return self._connection[1]
class ServerProxy(xmlrpclib.ServerProxy):
    """
    xmlrpclib.ServerProxy accepting an optional ``timeout`` keyword; when
    given, a timeout-aware transport (Transport or SafeTransport) is
    installed automatically.
    """
    def __init__(self, uri, **kwargs):
        self.timeout = timeout = kwargs.pop('timeout', None)
        # The above classes only come into play if a timeout
        # is specified
        if timeout is not None:
            # scheme = splittype(uri) # deprecated as of Python 3.8
            scheme = urlparse(uri)[0]
            use_datetime = kwargs.get('use_datetime', 0)
            transport_cls = SafeTransport if scheme == 'https' else Transport
            t = transport_cls(timeout, use_datetime=use_datetime)
            kwargs['transport'] = t
            self.transport = t
        xmlrpclib.ServerProxy.__init__(self, uri, **kwargs)
1553#
1554# CSV functionality. This is provided because on 2.x, the csv module can't
1555# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files.
1556#
1558def _csv_open(fn, mode, **kwargs):
1559 if sys.version_info[0] < 3:
1560 mode += 'b'
1561 else:
1562 kwargs['newline'] = ''
1563 # Python 3 determines encoding from locale. Force 'utf-8'
1564 # file encoding to match other forced utf-8 encoding
1565 kwargs['encoding'] = 'utf-8'
1566 return open(fn, mode, **kwargs)
class CSVBase(object):
    """
    Shared plumbing for CSVReader/CSVWriter: common csv dialect options
    plus context-manager support that closes the underlying stream.
    """
    # The str() wrappers keep these as native str on Python 2, where the
    # csv module rejects unicode.
    defaults = {
        'delimiter': str(','),
        'quotechar': str('"'),
        'lineterminator': str('\n')
    }

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        self.stream.close()
class CSVReader(CSVBase):
    """
    CSV reader over either an open binary stream ('stream' kwarg) or a
    file path ('path' kwarg); rows are unicode on both Python 2 and 3.
    """
    def __init__(self, **kwargs):
        if 'stream' in kwargs:
            stream = kwargs['stream']
            # The Python 3 csv module needs a text stream, so wrap the
            # binary one in a utf-8 decoder.
            if sys.version_info[0] >= 3:
                stream = codecs.getreader('utf-8')(stream)
            self.stream = stream
        else:
            self.stream = _csv_open(kwargs['path'], 'r')
        self.reader = csv.reader(self.stream, **self.defaults)

    def __iter__(self):
        return self

    def next(self):
        row = next(self.reader)
        if sys.version_info[0] < 3:
            # Decode byte cells to unicode on Python 2.
            row = [cell if isinstance(cell, text_type)
                   else cell.decode('utf-8') for cell in row]
        return row

    __next__ = next
class CSVWriter(CSVBase):
    """
    CSV writer for a file path; on Python 2, unicode cells are encoded to
    utf-8 before being handed to the csv module.
    """
    def __init__(self, fn, **kwargs):
        self.stream = _csv_open(fn, 'w')
        self.writer = csv.writer(self.stream, **self.defaults)

    def writerow(self, row):
        if sys.version_info[0] < 3:
            # The Python 2 csv module cannot handle unicode cells.
            row = [cell.encode('utf-8') if isinstance(cell, text_type)
                   else cell for cell in row]
        self.writer.writerow(row)
1623#
1624# Configurator functionality
1625#
class Configurator(BaseConfigurator):
    """
    Configurator supporting custom-object construction from dicts (the
    '()' / '[]' / '.' convention) and an additional inc:// converter
    which includes JSON from another file.
    """

    value_converters = dict(BaseConfigurator.value_converters)
    value_converters['inc'] = 'inc_convert'

    def __init__(self, config, base=None):
        # ``base`` is the directory against which relative inc:// paths
        # are resolved; defaults to the current working directory.
        super(Configurator, self).__init__(config)
        self.base = base or os.getcwd()

    def configure_custom(self, config):
        """
        Instantiate the callable named by config['()'] with positional
        args from '[]' and keyword args from the remaining keys, then set
        any extra attributes given under '.'.
        """
        def convert(o):
            # Recursively convert containers, nested custom-object
            # specs (dicts with '()') and scalar values.
            if isinstance(o, (list, tuple)):
                result = type(o)([convert(i) for i in o])
            elif isinstance(o, dict):
                if '()' in o:
                    result = self.configure_custom(o)
                else:
                    result = {}
                    for k in o:
                        result[k] = convert(o[k])
            else:
                result = self.convert(o)
            return result

        c = config.pop('()')
        if not callable(c):
            c = self.resolve(c)
        props = config.pop('.', None)
        # Check for valid identifiers
        args = config.pop('[]', ())
        if args:
            args = tuple([convert(o) for o in args])
        items = [(k, convert(config[k])) for k in config if valid_ident(k)]
        kwargs = dict(items)
        result = c(*args, **kwargs)
        if props:
            for n, v in props.items():
                setattr(result, n, convert(v))
        return result

    def __getitem__(self, key):
        # Lazily build custom objects on first access and cache them
        # back into the config.
        result = self.config[key]
        if isinstance(result, dict) and '()' in result:
            self.config[key] = result = self.configure_custom(result)
        return result

    def inc_convert(self, value):
        """Default converter for the inc:// protocol."""
        if not os.path.isabs(value):
            value = os.path.join(self.base, value)
        with codecs.open(value, 'r', encoding='utf-8') as f:
            result = json.load(f)
        return result
class SubprocessMixin(object):
    """
    Mixin for running subprocesses and capturing their output
    """
    def __init__(self, verbose=False, progress=None):
        self.verbose = verbose
        self.progress = progress

    def reader(self, stream, context):
        """
        Read lines from a subprocess' output stream and either pass to a progress
        callable (if specified) or write progress information to sys.stderr.
        """
        progress = self.progress
        verbose = self.verbose
        while True:
            line = stream.readline()
            if not line:
                break
            if progress is not None:
                progress(line, context)
            elif verbose:
                sys.stderr.write(line.decode('utf-8'))
            else:
                sys.stderr.write('.')
            sys.stderr.flush()
        stream.close()

    def run_command(self, cmd, **kwargs):
        """
        Run *cmd* with stdout/stderr drained by reader threads; return the
        completed Popen instance.
        """
        p = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE, **kwargs)
        threads = []
        for stream, name in ((p.stdout, 'stdout'), (p.stderr, 'stderr')):
            t = threading.Thread(target=self.reader, args=(stream, name))
            t.start()
            threads.append(t)
        p.wait()
        for t in threads:
            t.join()
        if self.progress is not None:
            self.progress('done.', 'main')
        elif self.verbose:
            sys.stderr.write('done.\n')
        return p
def normalize_name(name):
    """Normalize a python package name a la PEP 503"""
    # https://www.python.org/dev/peps/pep-0503/#normalized-names
    collapsed = re.sub('[-_.]+', '-', name)
    return collapsed.lower()
1733# def _get_pypirc_command():
1734 # """
1735 # Get the distutils command for interacting with PyPI configurations.
1736 # :return: the command.
1737 # """
1738 # from distutils.core import Distribution
1739 # from distutils.config import PyPIRCCommand
1740 # d = Distribution()
1741 # return PyPIRCCommand(d)
class PyPIRCFile(object):
    """
    Reader/writer for the distutils-style ~/.pypirc configuration file,
    supporting both the 'distutils'/index-servers format and the old
    'server-login' format.
    """

    DEFAULT_REPOSITORY = 'https://upload.pypi.org/legacy/'
    DEFAULT_REALM = 'pypi'

    def __init__(self, fn=None, url=None):
        # ``fn`` defaults to ~/.pypirc; ``url`` selects which configured
        # repository read() should match against.
        if fn is None:
            fn = os.path.join(os.path.expanduser('~'), '.pypirc')
        self.filename = fn
        self.url = url

    def read(self):
        """
        Return a dict with server/username/password/repository/realm for
        the configured repository, or {} when nothing matches.
        """
        result = {}

        if os.path.exists(self.filename):
            repository = self.url or self.DEFAULT_REPOSITORY

            config = configparser.RawConfigParser()
            config.read(self.filename)
            sections = config.sections()
            if 'distutils' in sections:
                # let's get the list of servers
                index_servers = config.get('distutils', 'index-servers')
                _servers = [server.strip() for server in
                            index_servers.split('\n')
                            if server.strip() != '']
                if _servers == []:
                    # nothing set, let's try to get the default pypi
                    if 'pypi' in sections:
                        _servers = ['pypi']
                else:
                    for server in _servers:
                        result = {'server': server}
                        result['username'] = config.get(server, 'username')

                        # optional params
                        for key, default in (('repository', self.DEFAULT_REPOSITORY),
                                             ('realm', self.DEFAULT_REALM),
                                             ('password', None)):
                            if config.has_option(server, key):
                                result[key] = config.get(server, key)
                            else:
                                result[key] = default

                        # work around people having "repository" for the "pypi"
                        # section of their config set to the HTTP (rather than
                        # HTTPS) URL
                        if (server == 'pypi' and
                                repository in (self.DEFAULT_REPOSITORY, 'pypi')):
                            result['repository'] = self.DEFAULT_REPOSITORY
                        elif (result['server'] != repository and
                              result['repository'] != repository):
                            # this server doesn't match the requested
                            # repository; discard and keep looking
                            result = {}
            elif 'server-login' in sections:
                # old format
                server = 'server-login'
                if config.has_option(server, 'repository'):
                    repository = config.get(server, 'repository')
                else:
                    repository = self.DEFAULT_REPOSITORY
                result = {
                    'username': config.get(server, 'username'),
                    'password': config.get(server, 'password'),
                    'repository': repository,
                    'server': server,
                    'realm': self.DEFAULT_REALM
                }
        return result

    def update(self, username, password):
        """Write ``username``/``password`` into the [pypi] section of the file."""
        # import pdb; pdb.set_trace()
        config = configparser.RawConfigParser()
        fn = self.filename
        config.read(fn)
        if not config.has_section('pypi'):
            config.add_section('pypi')
        config.set('pypi', 'username', username)
        config.set('pypi', 'password', password)
        with open(fn, 'w') as f:
            config.write(f)
def _load_pypirc(index):
    """
    Read the PyPI access configuration as supported by distutils.
    """
    pypirc = PyPIRCFile(url=index.url)
    return pypirc.read()
def _store_pypirc(index):
    """Save the index's username/password to the default .pypirc file."""
    pypirc = PyPIRCFile()
    pypirc.update(index.username, index.password)
1833#
1834# get_platform()/get_host_platform() copied from Python 3.10.a0 source, with some minor
1835# tweaks
1836#
def get_host_platform():
    """Return a string that identifies the current platform. This is used mainly to
    distinguish platform-specific build directories and platform-specific built
    distributions. Typically includes the OS name and version and the
    architecture (as supplied by 'os.uname()'), although the exact information
    included depends on the OS; eg. on Linux, the kernel version isn't
    particularly important.

    Examples of returned values:
        linux-i586
        linux-alpha (?)
        solaris-2.6-sun4u

    Windows will return one of:
        win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
        win32 (all others - specifically, sys.platform is returned)

    For other non-POSIX platforms, currently just returns 'sys.platform'.

    """
    if os.name == 'nt':
        # The architecture markers appear in the interpreter build string
        # embedded in sys.version.
        if 'amd64' in sys.version.lower():
            return 'win-amd64'
        if '(arm)' in sys.version.lower():
            return 'win-arm32'
        if '(arm64)' in sys.version.lower():
            return 'win-arm64'
        return sys.platform

    # Set for cross builds explicitly
    if "_PYTHON_HOST_PLATFORM" in os.environ:
        return os.environ["_PYTHON_HOST_PLATFORM"]

    if os.name != 'posix' or not hasattr(os, 'uname'):
        # XXX what about the architecture? NT is Intel or Alpha,
        # Mac OS is M68k or PPC, etc.
        return sys.platform

    # Try to distinguish various flavours of Unix

    (osname, host, release, version, machine) = os.uname()

    # Convert the OS name to lowercase, remove '/' characters, and translate
    # spaces (for "Power Macintosh")
    osname = osname.lower().replace('/', '')
    machine = machine.replace(' ', '_').replace('/', '-')

    if osname[:5] == 'linux':
        # At least on Linux/Intel, 'machine' is the processor --
        # i386, etc.
        # XXX what about Alpha, SPARC, etc?
        return "%s-%s" % (osname, machine)

    elif osname[:5] == 'sunos':
        if release[0] >= '5': # SunOS 5 == Solaris 2
            osname = 'solaris'
            release = '%d.%s' % (int(release[0]) - 3, release[2:])
            # We can't use 'platform.architecture()[0]' because a
            # bootstrap problem. We use a dict to get an error
            # if some suspicious happens.
            bitness = {2147483647:'32bit', 9223372036854775807:'64bit'}
            machine += '.%s' % bitness[sys.maxsize]
        # fall through to standard osname-release-machine representation
    elif osname[:3] == 'aix':
        from _aix_support import aix_platform
        return aix_platform()
    elif osname[:6] == 'cygwin':
        osname = 'cygwin'
        # Truncate the release string to its leading dotted-numeric part.
        rel_re = re.compile (r'[\d.]+', re.ASCII)
        m = rel_re.match(release)
        if m:
            release = m.group()
    elif osname[:6] == 'darwin':
        # NOTE(review): distutils is deprecated/removed in newer Pythons;
        # this mirrors the CPython 3.10.0a source this was copied from.
        import _osx_support, distutils.sysconfig
        osname, release, machine = _osx_support.get_platform_osx(
            distutils.sysconfig.get_config_vars(),
            osname, release, machine)

    return '%s-%s-%s' % (osname, release, machine)
# Mapping of VSCMD_ARG_TGT_ARCH values to platform tags, used by
# get_platform() for Windows cross-compilation.
_TARGET_TO_PLAT = {
    'x86' : 'win32',
    'x64' : 'win-amd64',
    'arm' : 'win-arm32',
}
def get_platform():
    """
    Return the target platform tag, honouring the VSCMD_ARG_TGT_ARCH
    environment variable for Windows cross-compilation; otherwise fall
    back to the host platform.
    """
    if os.name == 'nt':
        target = os.environ.get('VSCMD_ARG_TGT_ARCH')
        if target in _TARGET_TO_PLAT:
            return _TARGET_TO_PLAT[target]
    return get_host_platform()