Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/dill/_dill.py: 29%
Shortcuts on this page
r m x toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
Shortcuts on this page
r m x toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
1# -*- coding: utf-8 -*-
2#
3# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4# Copyright (c) 2008-2015 California Institute of Technology.
5# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6# License: 3-clause BSD. The full license text is available at:
7# - https://github.com/uqfoundation/dill/blob/master/LICENSE
8"""
9dill: a utility for serialization of python objects
11The primary functions in `dill` are :func:`dump` and
12:func:`dumps` for serialization ("pickling") to a
13file or to a string, respectively, and :func:`load`
14and :func:`loads` for deserialization ("unpickling"),
15similarly, from a file or from a string. Other notable
16functions are :func:`~dill.dump_module` and
17:func:`~dill.load_module`, which are used to save and
restore module objects, including an interpreter session.
20Based on code written by Oren Tirosh and Armin Ronacher.
21Extended to a (near) full set of the builtin types (in types module),
22and coded to the pickle interface, by <mmckerns@caltech.edu>.
23Initial port to python3 by Jonathan Dobson, continued by mmckerns.
24Tested against "all" python types (Std. Lib. CH 1-15 @ 2.7) by mmckerns.
25Tested against CH16+ Std. Lib. ... TBD.
26"""
28from __future__ import annotations
30__all__ = [
31 'dump','dumps','load','loads','copy',
32 'Pickler','Unpickler','register','pickle','pickles','check',
33 'DEFAULT_PROTOCOL','HIGHEST_PROTOCOL','HANDLE_FMODE','CONTENTS_FMODE','FILE_FMODE',
34 'PickleError','PickleWarning','PicklingError','PicklingWarning','UnpicklingError',
35 'UnpicklingWarning',
36]
38__module__ = 'dill'
40import warnings
41from .logger import adapter as logger
42from .logger import trace as _trace
43log = logger # backward compatibility (see issue #582)
45import os
46import sys
# lazy handle to the dill.diff module (populated by use_diff)
diff = None
_use_diff = False
# interpreter-version flags used to select version-specific code paths
OLD38 = (sys.hexversion < 0x3080000)
OLD39 = (sys.hexversion < 0x3090000)
OLD310 = (sys.hexversion < 0x30a0000)
OLD312a7 = (sys.hexversion < 0x30c00a7)
53#XXX: get types from .objtypes ?
54import builtins as __builtin__
55from pickle import _Pickler as StockPickler, Unpickler as StockUnpickler
56from pickle import GLOBAL, POP
57from _thread import LockType
58from _thread import RLock as RLockType
59#from io import IOBase
60from types import CodeType, FunctionType, MethodType, GeneratorType, \
61 TracebackType, FrameType, ModuleType, BuiltinMethodType
62BufferType = memoryview #XXX: unregistered
63ClassType = type # no 'old-style' classes
64EllipsisType = type(Ellipsis)
65#FileType = IOBase
66NotImplementedType = type(NotImplemented)
67SliceType = slice
68TypeType = type # 'new-style' classes #XXX: unregistered
69XRangeType = range
70from types import MappingProxyType as DictProxyType, new_class
71from pickle import DEFAULT_PROTOCOL, HIGHEST_PROTOCOL, PickleError, PicklingError, UnpicklingError
72import __main__ as _main_module
73import marshal
74import gc
75# import zlib
76import abc
77import dataclasses
78from weakref import ReferenceType, ProxyType, CallableProxyType
79from collections import OrderedDict
80from enum import Enum, EnumMeta
81from functools import partial
82from operator import itemgetter, attrgetter
# when True, Pickler.save refuses to pickle generators
GENERATOR_FAIL = False
import importlib.machinery
# file suffixes that identify compiled extension modules
EXTENSION_SUFFIXES = tuple(importlib.machinery.EXTENSION_SUFFIXES)
try:
    import ctypes
    HAS_CTYPES = True
    # if using `pypy`, pythonapi is not found
    IS_PYPY = not hasattr(ctypes, 'pythonapi')
except ImportError:
    HAS_CTYPES = False
    IS_PYPY = False
# markers for numpy availability; set to True here (not the actual classes)
# so numpy is only imported lazily, via __hook__, when actually needed
NumpyUfuncType = None
NumpyDType = None
NumpyArrayType = None
try:
    # probe for numpy on the import path without importing it
    if not importlib.machinery.PathFinder().find_spec('numpy'):
        raise ImportError("No module named 'numpy'")
    NumpyUfuncType = True
    NumpyDType = True
    NumpyArrayType = True
except ImportError:
    pass
105def __hook__():
106 global NumpyArrayType, NumpyDType, NumpyUfuncType
107 from numpy import ufunc as NumpyUfuncType
108 from numpy import ndarray as NumpyArrayType
109 from numpy import dtype as NumpyDType
110 return True
if NumpyArrayType: # then has numpy
    def ndarraysubclassinstance(obj_type):
        # True only for ndarray (sub)classes that did NOT override __reduce__
        if all((c.__module__, c.__name__) != ('numpy', 'ndarray') for c in obj_type.__mro__):
            return False
        # anything below here is a numpy array (or subclass) instance
        __hook__() # import numpy (so the following works!!!)
        # verify that __reduce__ has not been overridden
        if obj_type.__reduce_ex__ is not NumpyArrayType.__reduce_ex__ \
        or obj_type.__reduce__ is not NumpyArrayType.__reduce__:
            return False
        return True
    def numpyufunc(obj_type):
        # True if any class in the MRO is numpy.ufunc
        return any((c.__module__, c.__name__) == ('numpy', 'ufunc') for c in obj_type.__mro__)
    def numpydtype(obj_type):
        if all((c.__module__, c.__name__) != ('numpy', 'dtype') for c in obj_type.__mro__):
            return False
        # anything below here is a numpy dtype
        __hook__() # import numpy (so the following works!!!)
        return obj_type is type(NumpyDType) # handles subclasses
else:
    # numpy not installed: all predicates are trivially False
    def ndarraysubclassinstance(obj): return False
    def numpyufunc(obj): return False
    def numpydtype(obj): return False
135from types import GetSetDescriptorType, ClassMethodDescriptorType, \
136 WrapperDescriptorType, MethodDescriptorType, MemberDescriptorType, \
137 MethodWrapperType #XXX: unused
# make sure to add these 'hand-built' types to _typemap
CellType = type((lambda x: lambda y: x)(0).__closure__[0]) # closure cell type
PartialType = type(partial(int, base=2))
SuperType = type(super(Exception, TypeError()))
ItemGetterType = type(itemgetter(0))
AttrGetterType = type(attrgetter('__repr__'))
try:
    # the lru_cache wrapper type (CPython exposes it as a class)
    from functools import _lru_cache_wrapper as LRUCacheType
except ImportError:
    LRUCacheType = None
if not isinstance(LRUCacheType, type):
    # NOTE(review): some implementations expose lru_cache wrappers as plain
    # functions rather than a type -- then treated as unsupported
    LRUCacheType = None
def get_file_type(*args, **kwargs):
    """Open os.devnull with the given mode/buffering options and return the
    type of the resulting file object; the handle is closed before returning.

    The keyword *open* selects the opener (defaults to the builtin open).
    """
    opener = kwargs.pop("open", __builtin__.open)
    handle = opener(os.devnull, *args, **kwargs)
    try:
        return type(handle)
    finally:
        handle.close()
IS_PYODIDE = sys.platform == 'emscripten'

# concrete io types, discovered by probing os.devnull with each mode
FileType = get_file_type('rb', buffering=0)
TextWrapperType = get_file_type('r', buffering=-1)
BufferedRandomType = None if IS_PYODIDE else get_file_type('r+b', buffering=-1)
BufferedReaderType = get_file_type('rb', buffering=-1)
BufferedWriterType = get_file_type('wb', buffering=-1)
try:
    # the pure-python io implementation produces distinct types
    from _pyio import open as _open
    PyTextWrapperType = get_file_type('r', buffering=-1, open=_open)
    PyBufferedRandomType = None if IS_PYODIDE else get_file_type('r+b', buffering=-1, open=_open)
    PyBufferedReaderType = get_file_type('rb', buffering=-1, open=_open)
    PyBufferedWriterType = get_file_type('wb', buffering=-1, open=_open)
except ImportError:
    PyTextWrapperType = PyBufferedRandomType = PyBufferedReaderType = PyBufferedWriterType = None
from io import BytesIO as StringIO
InputType = OutputType = None
from socket import socket as SocketType
#FIXME: additionally calls ForkingPickler.register several times
from multiprocessing.reduction import _reduce_socket as reduce_socket
try: #pragma: no cover
    # __IPYTHON__ is injected into builtins by IPython
    IS_IPYTHON = __IPYTHON__ # is True
    ExitType = None # IPython.core.autocall.ExitAutocall
    IPYTHON_SINGLETONS = ('exit', 'quit', 'get_ipython')
except NameError:
    IS_IPYTHON = False
    try: ExitType = type(exit) # apparently 'exit' can be removed
    except NameError: ExitType = None
    IPYTHON_SINGLETONS = ()
191import inspect
192import typing
195### Shims for different versions of Python and dill
class Sentinel(object):
    """
    A unique sentinel object that is pickled as a constant.

    The pickle payload is just the sentinel's name, so unpickling resolves
    back to the single module-level instance; copying returns the same object.
    """
    def __init__(self, name, module_name=None):
        self.name = name
        if module_name is not None:
            self.__module__ = module_name # pragma: no cover
        else:
            # default to the module of the calling frame
            self.__module__ = inspect.currentframe().f_back.f_globals['__name__']
    def __repr__(self):
        return '%s.%s' % (self.__module__, self.name) # pragma: no cover
    def __copy__(self):
        return self # pragma: no cover
    def __deepcopy__(self, memo):
        return self # pragma: no cover
    def __reduce__(self):
        return self.name
    def __reduce_ex__(self, protocol):
        return self.name
218from . import _shims
219from ._shims import Reduce, Getattr
### File modes
# These constants select how file handles are pickled; pass one as *fmode*
# to dump/dumps (a default can be set in dill.settings).
#: Pickles the file handle, preserving mode. The position of the unpickled
#: object is as for a new file handle.
HANDLE_FMODE = 0
#: Pickles the file contents, creating a new file if on load the file does
#: not exist. The position = min(pickled position, EOF) and mode is chosen
#: as such that "best" preserves behavior of the original file.
CONTENTS_FMODE = 1
#: Pickles the entire file (handle and contents), preserving mode and position.
FILE_FMODE = 2
232### Shorthands (modified from python2.5/lib/pickle.py)
def copy(obj, *args, **kwds):
    """
    Use pickling to 'copy' an object (i.e. `loads(dumps(obj))`).

    See :func:`dumps` and :func:`loads` for keyword arguments.
    """
    ignore = kwds.pop('ignore', Unpickler.settings['ignore'])
    pickled = dumps(obj, *args, **kwds)
    return loads(pickled, ignore=ignore)
def dump(obj, file, protocol=None, byref=None, fmode=None, recurse=None, **kwds):#, strictio=None):
    """
    Pickle an object to a file.

    See :func:`dumps` for keyword arguments.
    """
    from .settings import settings
    if protocol is None:
        protocol = settings['protocol']
    else:
        protocol = int(protocol)
    # explicit keyword args override anything of the same name in **kwds
    pickler_kwds = dict(kwds, byref=byref, fmode=fmode, recurse=recurse)
    Pickler(file, protocol, **pickler_kwds).dump(obj)
def dumps(obj, protocol=None, byref=None, fmode=None, recurse=None, **kwds):#, strictio=None):
    """
    Pickle an object to a string.

    *protocol* is the pickler protocol, as defined for Python *pickle*.

    If *byref=True*, then dill behaves a lot more like pickle as certain
    objects (like modules) are pickled by reference as opposed to attempting
    to pickle the object itself.

    If *recurse=True*, then objects referred to in the global dictionary
    are recursively traced and pickled, instead of the default behavior
    of attempting to store the entire global dictionary. This is needed for
    functions defined via *exec()*.

    *fmode* (:const:`HANDLE_FMODE`, :const:`CONTENTS_FMODE`,
    or :const:`FILE_FMODE`) indicates how file handles will be pickled.
    For example, when pickling a data file handle for transfer to a remote
    compute service, *FILE_FMODE* will include the file contents in the
    pickle and cursor position so that a remote method can operate
    transparently on an object with an open file handle.

    Default values for keyword arguments can be set in :mod:`dill.settings`.
    """
    buffer = StringIO()
    dump(obj, buffer, protocol, byref, fmode, recurse, **kwds)#, strictio)
    return buffer.getvalue()
def load(file, ignore=None, **kwds):
    """
    Unpickle an object from a file.

    See :func:`loads` for keyword arguments.
    """
    unpickler = Unpickler(file, ignore=ignore, **kwds)
    return unpickler.load()
def loads(str, ignore=None, **kwds):
    """
    Unpickle an object from a string.

    If *ignore=False* then objects whose class is defined in the module
    *__main__* are updated to reference the existing class in *__main__*,
    otherwise they are left to refer to the reconstructed type, which may
    be different.

    Default values for keyword arguments can be set in :mod:`dill.settings`.
    """
    #NOTE: the parameter is named 'str' for backward compatibility with
    # keyword callers; it shadows the builtin only inside this function
    buffer = StringIO(str)
    return load(buffer, ignore, **kwds)
305# def dumpzs(obj, protocol=None):
306# """pickle an object to a compressed string"""
307# return zlib.compress(dumps(obj, protocol))
309# def loadzs(str):
310# """unpickle an object from a compressed string"""
311# return loads(zlib.decompress(str))
313### End: Shorthands ###
class MetaCatchingDict(dict):
    """A dict that maps missing metaclass keys to ``save_type``.

    Used as the Pickler dispatch table: looking up a type object that is
    itself a subclass of ``type`` (i.e. a metaclass) falls through to the
    generic ``save_type`` handler instead of failing.
    """
    def get(self, key, default=None):
        # route through __getitem__ so that __missing__ applies to .get()
        # as well (plain dict.get never calls __missing__)
        try:
            return self[key]
        except KeyError:
            return default

    def __missing__(self, key):
        if issubclass(key, type):
            return save_type
        else:
            # include the key so the error message identifies what failed
            raise KeyError(key)
class PickleWarning(Warning, PickleError):
    # Base warning for dill; inherits PickleError so code catching pickle
    # errors also sees it.
    pass
class PicklingWarning(PickleWarning, PicklingError):
    # Warning for non-fatal problems encountered while pickling.
    pass
class UnpicklingWarning(PickleWarning, UnpicklingError):
    # Warning for non-fatal problems encountered while unpickling.
    pass
337### Extend the Picklers
class Pickler(StockPickler):
    """python's Pickler extended to interpreter sessions"""
    # class-level dispatch table shared by all instances; the
    # MetaCatchingDict wrapper routes unknown metaclass keys to save_type
    dispatch: typing.Dict[type, typing.Callable[[Pickler, typing.Any], None]] \
        = MetaCatchingDict(StockPickler.dispatch.copy())
    """The dispatch table, a dictionary of serializing functions used
    by Pickler to save objects of specific types. Use :func:`pickle`
    or :func:`register` to associate types to custom functions.

    :meta hide-value:
    """
    _session = False
    from .settings import settings

    def __init__(self, file, *args, **kwds):
        # resolve per-instance options, falling back to dill.settings defaults
        settings = Pickler.settings
        _byref = kwds.pop('byref', None)
        #_strictio = kwds.pop('strictio', None)
        _fmode = kwds.pop('fmode', None)
        _recurse = kwds.pop('recurse', None)
        StockPickler.__init__(self, file, *args, **kwds)
        self._main = _main_module
        self._diff_cache = {}
        self._byref = settings['byref'] if _byref is None else _byref
        self._strictio = False #_strictio
        self._fmode = settings['fmode'] if _fmode is None else _fmode
        self._recurse = settings['recurse'] if _recurse is None else _recurse
        self._postproc = OrderedDict()
        self._file = file

    def save(self, obj, save_persistent_id=True):
        # numpy hack: handlers for numpy types are registered lazily, on
        # first encounter, so dill never has to import numpy up front
        obj_type = type(obj)
        if NumpyArrayType and not (obj_type is type or obj_type in Pickler.dispatch):
            # register if the object is a numpy ufunc
            # thanks to Paul Kienzle for pointing out ufuncs didn't pickle
            if numpyufunc(obj_type):
                @register(obj_type)
                def save_numpy_ufunc(pickler, obj):
                    logger.trace(pickler, "Nu: %s", obj)
                    name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
                    StockPickler.save_global(pickler, obj, name=name)
                    logger.trace(pickler, "# Nu")
                    return
                # NOTE: the above 'save' performs like:
                #   import copy_reg
                #   def udump(f): return f.__name__
                #   def uload(name): return getattr(numpy, name)
                #   copy_reg.pickle(NumpyUfuncType, udump, uload)
            # register if the object is a numpy dtype
            if numpydtype(obj_type):
                @register(obj_type)
                def save_numpy_dtype(pickler, obj):
                    logger.trace(pickler, "Dt: %s", obj)
                    pickler.save_reduce(_create_dtypemeta, (obj.type,), obj=obj)
                    logger.trace(pickler, "# Dt")
                    return
                # NOTE: the above 'save' performs like:
                #   import copy_reg
                #   def uload(name): return type(NumpyDType(name))
                #   def udump(f): return uload, (f.type,)
                #   copy_reg.pickle(NumpyDTypeType, udump, uload)
            # register if the object is a subclassed numpy array instance
            if ndarraysubclassinstance(obj_type):
                @register(obj_type)
                def save_numpy_array(pickler, obj):
                    logger.trace(pickler, "Nu: (%s, %s)", obj.shape, obj.dtype)
                    npdict = getattr(obj, '__dict__', None)
                    f, args, state = obj.__reduce__()
                    pickler.save_reduce(_create_array, (f,args,state,npdict), obj=obj)
                    logger.trace(pickler, "# Nu")
                    return
        # end numpy hack

        if GENERATOR_FAIL and obj_type is GeneratorType:
            msg = "Can't pickle %s: attribute lookup builtins.generator failed" % GeneratorType
            raise PicklingError(msg)
        StockPickler.save(self, obj, save_persistent_id)

    save.__doc__ = StockPickler.save.__doc__

    def dump(self, obj): #NOTE: if settings change, need to update attributes
        logger.trace_setup(self)
        StockPickler.dump(self, obj)
    dump.__doc__ = StockPickler.dump.__doc__
class Unpickler(StockUnpickler):
    """python's Unpickler extended to interpreter sessions and more types"""
    from .settings import settings
    _session = False

    def find_class(self, module, name):
        # special-case lookups that map dill's pickled names back to objects
        if (module, name) == ('__builtin__', '__main__'):
            return self._main.__dict__ #XXX: above set w/save_module_dict
        elif (module, name) == ('__builtin__', 'NoneType'):
            return type(None) #XXX: special case: NoneType missing
        if module == 'dill.dill': module = 'dill._dill' # legacy module path
        return StockUnpickler.find_class(self, module, name)

    def __init__(self, *args, **kwds):
        settings = Pickler.settings
        _ignore = kwds.pop('ignore', None)
        StockUnpickler.__init__(self, *args, **kwds)
        self._main = _main_module
        self._ignore = settings['ignore'] if _ignore is None else _ignore

    def load(self): #NOTE: if settings change, need to update attributes
        obj = StockUnpickler.load(self)
        # if the object's class was defined in __main__, optionally rebind
        # it to the class currently living in __main__ (unless ignore=True)
        if type(obj).__module__ == getattr(_main_module, '__name__', '__main__'):
            if not self._ignore:
                # point obj class to main
                try: obj.__class__ = getattr(self._main, type(obj).__name__)
                except (AttributeError,TypeError): pass # defined in a file
        #_main_module.__dict__.update(obj.__dict__) #XXX: should update globals ?
        return obj
    load.__doc__ = StockUnpickler.load.__doc__
    pass
455'''
456def dispatch_table():
457 """get the dispatch table of registered types"""
458 return Pickler.dispatch
459'''
# snapshot of the stock pickle dispatch table taken before dill registers
# its own handlers; used by _revert_extension() to restore the originals
pickle_dispatch_copy = StockPickler.dispatch.copy()
def pickle(t, func):
    """expose :attr:`~Pickler.dispatch` table for user-created extensions"""
    # associate type *t* with serializer *func* in the shared dispatch table
    Pickler.dispatch[t] = func
def register(t):
    """decorator to register types to Pickler's :attr:`~Pickler.dispatch` table"""
    def _decorator(func):
        # record the handler and hand the function back unchanged
        Pickler.dispatch[t] = func
        return func
    return _decorator
def _revert_extension():
    """drop dill-registered types from pickle's dispatch table"""
    # iterate over a snapshot since the table is mutated while looping;
    # loop variable renamed from 'type' to avoid shadowing the builtin
    for cls, func in list(StockPickler.dispatch.items()):
        if func.__module__ == __name__:
            del StockPickler.dispatch[cls]
            # restore the stock handler if pickle originally had one
            if cls in pickle_dispatch_copy:
                StockPickler.dispatch[cls] = pickle_dispatch_copy[cls]
def use_diff(on=True):
    """
    Reduces size of pickles by only including object which have changed.

    Decreases pickle size but increases CPU time needed.
    Also helps avoid some unpickleable objects.
    MUST be called at start of script, otherwise changes will not be recorded.
    """
    global _use_diff, diff
    _use_diff = on
    # lazily bind the diff module the first time diffing is switched on
    if not (_use_diff and diff is None):
        return
    try:
        from . import diff as d
    except ImportError:
        import diff as d
    diff = d
500def _create_typemap():
501 import types
502 d = dict(list(__builtin__.__dict__.items()) + \
503 list(types.__dict__.items())).items()
504 for key, value in d:
505 if getattr(value, '__module__', None) == 'builtins' \
506 and type(value) is type:
507 yield key, value
508 return
# name -> type mapping used when unpickling type references
_reverse_typemap = dict(_create_typemap())
_reverse_typemap.update({
    'PartialType': PartialType,
    'SuperType': SuperType,
    'ItemGetterType': ItemGetterType,
    'AttrGetterType': AttrGetterType,
})
if sys.hexversion < 0x30800a2:
    # CellType is not exposed by the types module before 3.8.0a2
    _reverse_typemap.update({
        'CellType': CellType,
    })

# "Incidental" implementation specific types. Unpickling these types in another
# implementation of Python (PyPy -> CPython) is not guaranteed to work

# This dictionary should contain all types that appear in Python implementations
# but are not defined in https://docs.python.org/3/library/types.html#standard-interpreter-types
x=OrderedDict()
# NOTE: 'incedental' (sic) is the established spelling of this internal name
_incedental_reverse_typemap = {
    'FileType': FileType,
    'BufferedRandomType': BufferedRandomType,
    'BufferedReaderType': BufferedReaderType,
    'BufferedWriterType': BufferedWriterType,
    'TextWrapperType': TextWrapperType,
    'PyBufferedRandomType': PyBufferedRandomType,
    'PyBufferedReaderType': PyBufferedReaderType,
    'PyBufferedWriterType': PyBufferedWriterType,
    'PyTextWrapperType': PyTextWrapperType,
}

# dict/OrderedDict view types (not importable by name)
_incedental_reverse_typemap.update({
    "DictKeysType": type({}.keys()),
    "DictValuesType": type({}.values()),
    "DictItemsType": type({}.items()),

    "OdictKeysType": type(x.keys()),
    "OdictValuesType": type(x.values()),
    "OdictItemsType": type(x.items()),
})

if ExitType:
    _incedental_reverse_typemap['ExitType'] = ExitType
if InputType:
    _incedental_reverse_typemap['InputType'] = InputType
    _incedental_reverse_typemap['OutputType'] = OutputType

'''
try:
    import symtable
    _incedental_reverse_typemap["SymtableEntryType"] = type(symtable.symtable("", "string", "exec")._table)
except: #FIXME: fails to pickle
    pass

if sys.hexversion >= 0x30a00a0:
    _incedental_reverse_typemap['LineIteratorType'] = type(compile('3', '', 'eval').co_lines())
'''

if sys.hexversion >= 0x30b00b0:
    from types import GenericAlias
    _incedental_reverse_typemap["GenericAliasIteratorType"] = type(iter(GenericAlias(list, (int,))))
    '''
    _incedental_reverse_typemap['PositionsIteratorType'] = type(compile('3', '', 'eval').co_positions())
    '''

try:
    import winreg
    _incedental_reverse_typemap["HKEYType"] = winreg.HKEYType # Windows only
except ImportError:
    pass

_reverse_typemap.update(_incedental_reverse_typemap)
_incedental_types = set(_incedental_reverse_typemap.values())

del x

# inverse mapping: type -> dill name
_typemap = dict((v, k) for k, v in _reverse_typemap.items())
586def _unmarshal(string):
587 return marshal.loads(string)
def _load_type(name):
    """Look up a type by its dill name in the reverse typemap."""
    return _reverse_typemap[name]
592def _create_type(typeobj, *args):
593 return typeobj(*args)
595def _create_function(fcode, fglobals, fname=None, fdefaults=None,
596 fclosure=None, fdict=None, fkwdefaults=None):
597 # same as FunctionType, but enable passing __dict__ to new function,
598 # __dict__ is the storehouse for attributes added after function creation
599 func = FunctionType(fcode, fglobals or dict(), fname, fdefaults, fclosure)
600 if fdict is not None:
601 func.__dict__.update(fdict) #XXX: better copy? option to copy?
602 if fkwdefaults is not None:
603 func.__kwdefaults__ = fkwdefaults
604 # 'recurse' only stores referenced modules/objects in fglobals,
605 # thus we need to make sure that we have __builtins__ as well
606 if "__builtins__" not in func.__globals__:
607 func.__globals__["__builtins__"] = globals()["__builtins__"]
608 # assert id(fglobals) == id(func.__globals__)
609 return func
class match:
    """
    Make available a limited structural pattern matching-like syntax for Python < 3.10

    Patterns can be only tuples (without types) currently.
    Inspired by the package pattern-matching-PEP634.

    Usage:
    >>> with match(args) as m:
    >>>     if m.case(('x', 'y')):
    >>>         # use m.x and m.y
    >>>     elif m.case(('x', 'y', 'z')):
    >>>         # use m.x, m.y and m.z

    Equivalent native code for Python >= 3.10:
    >>> match args:
    >>>     case (x, y):
    >>>         # use x and y
    >>>     case (x, y, z):
    >>>         # use x, y and z
    """
    def __init__(self, value):
        self.value = value
        self._fields = None

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        return False

    def case(self, args): # *args, **kwargs):
        """just handles tuple patterns"""
        if len(args) != len(self.value): # + len(kwargs):
            return False
        #if not all(isinstance(arg, pat) for arg, pat in zip(self.value[len(args):], kwargs.values())):
        #    return False
        self.args = args # (*args, *kwargs)
        return True

    @property
    def fields(self):
        # bind pattern names to values lazily, only on first access
        if self._fields is None:
            self._fields = dict(zip(self.args, self.value))
        return self._fields

    def __getattr__(self, item):
        return self.fields[item]
# Table of CodeType constructor signatures by Python version. Each row is
# (version, marker attribute unique to that version's code objects,
#  space-separated constructor parameter names in order).
ALL_CODE_PARAMS = [
    # Version New attribute CodeType parameters
    ((3,11,'a'), 'co_endlinetable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable endlinetable columntable exceptiontable freevars cellvars'),
    ((3,11), 'co_exceptiontable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable exceptiontable freevars cellvars'),
    ((3,10), 'co_linetable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno linetable freevars cellvars'),
    ((3,8), 'co_posonlyargcount', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno lnotab freevars cellvars'),
    ((3,7), 'co_kwonlyargcount', 'argcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno lnotab freevars cellvars'),
    ]
# detect the running interpreter's signature by probing for the marker
# attribute, newest version first
for version, new_attr, params in ALL_CODE_PARAMS:
    if hasattr(CodeType, new_attr):
        CODE_VERSION = version
        CODE_PARAMS = params.split()
        break
# parameters that may arrive as str in old pickles and must be re-encoded
# to bytes before constructing a code object
ENCODE_PARAMS = set(CODE_PARAMS).intersection(
    ['code', 'lnotab', 'linetable', 'endlinetable', 'columntable', 'exceptiontable'])
def _create_code(*args):
    """Reconstruct a code object from pickled arguments, translating between
    the CodeType constructor signatures of different Python versions."""
    if not isinstance(args[0], int): # co_lnotab stored from >= 3.10
        LNOTAB, *args = args
    else: # from < 3.10 (or pre-LNOTAB storage)
        LNOTAB = b''

    with match(args) as m:
        # Python 3.11/3.12a (18 members)
        if m.case((
            'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname', 'firstlineno', # args[6:14]
            'linetable', 'exceptiontable', 'freevars', 'cellvars' # args[14:]
        )):
            if CODE_VERSION == (3,11):
                # exact match for the running interpreter: construct directly
                return CodeType(
                    *args[:6],
                    args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
                    *args[7:14],
                    args[14].encode() if hasattr(args[14], 'encode') else args[14], # linetable
                    args[15].encode() if hasattr(args[15], 'encode') else args[15], # exceptiontable
                    args[16],
                    args[17],
                )
            fields = m.fields
        # Python 3.10 or 3.8/3.9 (16 members)
        elif m.case((
            'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'firstlineno', # args[6:13]
            'LNOTAB_OR_LINETABLE', 'freevars', 'cellvars' # args[13:]
        )):
            if CODE_VERSION == (3,10) or CODE_VERSION == (3,8):
                return CodeType(
                    *args[:6],
                    args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
                    *args[7:13],
                    args[13].encode() if hasattr(args[13], 'encode') else args[13], # lnotab/linetable
                    args[14],
                    args[15],
                )
            fields = m.fields
            # the 14th member is lnotab on <= 3.9 but linetable on 3.10
            if CODE_VERSION >= (3,10):
                fields['linetable'] = m.LNOTAB_OR_LINETABLE
            else:
                fields['lnotab'] = LNOTAB if LNOTAB else m.LNOTAB_OR_LINETABLE
        # Python 3.7 (15 args)
        elif m.case((
            'argcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:5]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'firstlineno', # args[5:12]
            'lnotab', 'freevars', 'cellvars' # args[12:]
        )):
            if CODE_VERSION == (3,7):
                return CodeType(
                    *args[:5],
                    args[5].encode() if hasattr(args[5], 'encode') else args[5], # code
                    *args[6:12],
                    args[12].encode() if hasattr(args[12], 'encode') else args[12], # lnotab
                    args[13],
                    args[14],
                )
            fields = m.fields
        # Python 3.11a (20 members)
        elif m.case((
            'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname', 'firstlineno', # args[6:14]
            'linetable', 'endlinetable', 'columntable', 'exceptiontable', 'freevars', 'cellvars' # args[14:]
        )):
            if CODE_VERSION == (3,11,'a'):
                return CodeType(
                    *args[:6],
                    args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
                    *args[7:14],
                    *(a.encode() if hasattr(a, 'encode') else a for a in args[14:18]), # linetable-exceptiontable
                    args[18],
                    args[19],
                )
            fields = m.fields
        else:
            raise UnpicklingError("pattern match for code object failed")

    # The args format doesn't match this version: fill any missing fields
    # with defaults, then assemble arguments in this interpreter's order.
    fields.setdefault('posonlyargcount', 0) # from python <= 3.7
    fields.setdefault('lnotab', LNOTAB) # from python >= 3.10
    fields.setdefault('linetable', b'') # from python <= 3.9
    fields.setdefault('qualname', fields['name']) # from python <= 3.10
    fields.setdefault('exceptiontable', b'') # from python <= 3.10
    fields.setdefault('endlinetable', None) # from python != 3.11a
    fields.setdefault('columntable', None) # from python != 3.11a

    args = (fields[k].encode() if k in ENCODE_PARAMS and hasattr(fields[k], 'encode') else fields[k]
            for k in CODE_PARAMS)
    return CodeType(*args)
764def _create_ftype(ftypeobj, func, args, kwds):
765 if kwds is None:
766 kwds = {}
767 if args is None:
768 args = ()
769 return ftypeobj(func, *args, **kwds)
771def _create_typing_tuple(argz, *args): #NOTE: workaround python/cpython#94245
772 if not argz:
773 return typing.Tuple[()].copy_with(())
774 if argz == ((),):
775 return typing.Tuple[()]
776 return typing.Tuple[argz]
778def _create_lock(locked, *args): #XXX: ignores 'blocking'
779 from threading import Lock
780 lock = Lock()
781 if locked:
782 if not lock.acquire(False):
783 raise UnpicklingError("Cannot acquire lock")
784 return lock
786def _create_rlock(count, owner, *args): #XXX: ignores 'blocking'
787 lock = RLockType()
788 if owner is not None:
789 lock._acquire_restore((count, owner))
790 if owner and not lock._is_owned():
791 raise UnpicklingError("Cannot acquire lock")
792 return lock
# thanks to matsjoyce for adding all the different file modes
def _create_filehandle(name, mode, position, closed, open, strictio, fmode, fdata): # buffering=0
    """Reconstruct a file handle from its pickled description.

    *name*/*mode*/*position*/*closed* describe the original handle; *open*
    is the opener to use; *strictio* makes missing files / bad positions
    raise instead of being papered over; *fmode* is one of HANDLE_FMODE,
    CONTENTS_FMODE, FILE_FMODE; *fdata* holds the file contents when
    fmode is FILE_FMODE.
    """
    # only pickles the handle, not the file contents... good? or StringIO(data)?
    # (for file contents see: http://effbot.org/librarybook/copy-reg.htm)
    # NOTE: handle special cases first (are there more special cases?)
    names = {'<stdin>':sys.__stdin__, '<stdout>':sys.__stdout__,
             '<stderr>':sys.__stderr__} #XXX: better fileno=(0,1,2) ?
    if name in list(names.keys()):
        f = names[name] #XXX: safer "f=sys.stdin"
    elif name == '<tmpfile>':
        # NOTE(review): os.tmpfile existed only on Python 2 -- this path
        # appears dead on Python 3; confirm before relying on it
        f = os.tmpfile()
    elif name == '<fdopen>':
        import tempfile
        f = tempfile.TemporaryFile(mode)
    else:
        try:
            exists = os.path.exists(name)
        except Exception:
            exists = False
        if not exists:
            if strictio:
                raise FileNotFoundError("[Errno 2] No such file or directory: '%s'" % name)
            elif "r" in mode and fmode != FILE_FMODE:
                name = '<fdopen>' # or os.devnull?
            current_size = 0 # or maintain position?
        else:
            current_size = os.path.getsize(name)

        # clamp (or reject) a pickled position past the current EOF
        if position > current_size:
            if strictio:
                raise ValueError("invalid buffer size")
            elif fmode == CONTENTS_FMODE:
                position = current_size
        # try to open the file by name
        # NOTE: has different fileno
        try:
            #FIXME: missing: *buffering*, encoding, softspace
            if fmode == FILE_FMODE:
                # restore from pickled contents: write fdata, then reopen
                # in the original mode if it was a read mode
                f = open(name, mode if "w" in mode else "w")
                f.write(fdata)
                if "w" not in mode:
                    f.close()
                    f = open(name, mode)
            elif name == '<fdopen>': # file did not exist
                import tempfile
                f = tempfile.TemporaryFile(mode)
            # treat x mode as w mode
            elif fmode == CONTENTS_FMODE \
               and ("w" in mode or "x" in mode):
                # stop truncation when opening
                flags = os.O_CREAT
                if "+" in mode:
                    flags |= os.O_RDWR
                else:
                    flags |= os.O_WRONLY
                f = os.fdopen(os.open(name, flags), mode)
                # set name to the correct value
                r = getattr(f, "buffer", f)
                r = getattr(r, "raw", r)
                r.name = name
                assert f.name == name
            else:
                f = open(name, mode)
        except (IOError, FileNotFoundError):
            err = sys.exc_info()[1]
            raise UnpicklingError(err)
    if closed:
        f.close()
    elif position >= 0 and fmode != HANDLE_FMODE:
        f.seek(position)
    return f
866def _create_stringi(value, position, closed):
867 f = StringIO(value)
868 if closed: f.close()
869 else: f.seek(position)
870 return f
872def _create_stringo(value, position, closed):
873 f = StringIO()
874 if closed: f.close()
875 else:
876 f.write(value)
877 f.seek(position)
878 return f
880class _itemgetter_helper(object):
881 def __init__(self):
882 self.items = []
883 def __getitem__(self, item):
884 self.items.append(item)
885 return
887class _attrgetter_helper(object):
888 def __init__(self, attrs, index=None):
889 self.attrs = attrs
890 self.index = index
891 def __getattribute__(self, attr):
892 attrs = object.__getattribute__(self, "attrs")
893 index = object.__getattribute__(self, "index")
894 if index is None:
895 index = len(attrs)
896 attrs.append(attr)
897 else:
898 attrs[index] = ".".join([attrs[index], attr])
899 return type(self)(attrs, index)
901class _dictproxy_helper(dict):
902 def __ror__(self, a):
903 return a
# Shared probe instance used with the mapping-proxy "or" trick below.
_dictproxy_helper_instance = _dictproxy_helper()

__d = {}
try:
    # In CPython 3.9 and later, this trick can be used to exploit the
    # implementation of the __or__ function of MappingProxyType to get the true
    # mapping referenced by the proxy. It may work for other implementations,
    # but is not guaranteed.
    MAPPING_PROXY_TRICK = __d is (DictProxyType(__d) | _dictproxy_helper_instance)
except Exception:
    # e.g. older Pythons where MappingProxyType has no __or__ at all
    MAPPING_PROXY_TRICK = False
del __d

# _CELL_REF and _CELL_EMPTY are used to stay compatible with versions of dill
# whose _create_cell functions do not have a default value.
# _CELL_REF can be safely removed entirely (replaced by empty tuples for calls
# to _create_cell) once breaking changes are allowed.
_CELL_REF = None
_CELL_EMPTY = Sentinel('_CELL_EMPTY')  # sentinel marking an empty closure cell
def _create_cell(contents=None):
    # Build a closure cell holding *contents*.  If *contents* is the
    # _CELL_EMPTY sentinel, `value` is deliberately left unassigned, so the
    # lambda's closure cell is genuinely empty (reading its cell_contents
    # raises ValueError) -- do not "simplify" this conditional.
    if contents is not _CELL_EMPTY:
        value = contents
    return (lambda: value).__closure__[0]
930def _create_weakref(obj, *args):
931 from weakref import ref
932 if obj is None: # it's dead
933 from collections import UserDict
934 return ref(UserDict(), *args)
935 return ref(obj, *args)
937def _create_weakproxy(obj, callable=False, *args):
938 from weakref import proxy
939 if obj is None: # it's dead
940 if callable: return proxy(lambda x:x, *args)
941 from collections import UserDict
942 return proxy(UserDict(), *args)
943 return proxy(obj, *args)
945def _eval_repr(repr_str):
946 return eval(repr_str)
948def _create_array(f, args, state, npdict=None):
949 #array = numpy.core.multiarray._reconstruct(*args)
950 array = f(*args)
951 array.__setstate__(state)
952 if npdict is not None: # we also have saved state in __dict__
953 array.__dict__.update(npdict)
954 return array
def _create_dtypemeta(scalar_type):
    # Rebuild the numpy dtype metaclass; trigger the lazy numpy hook first
    # if numpy has not been loaded yet.
    if NumpyDType is True: __hook__() # a bit hacky I think
    if scalar_type is None:
        # no scalar type recorded: the pickled object was numpy.dtype itself
        return NumpyDType
    return type(NumpyDType(scalar_type))
def _create_namedtuple(name, fieldnames, modulename, defaults=None):
    """Recreate a namedtuple class, preferring the importable original."""
    # if the class still exists at its recorded location, reuse it so that
    # identity-based checks keep working after unpickling
    existing = _import_module(modulename + '.' + name, safe=True)
    if existing is not None:
        return existing
    import collections
    return collections.namedtuple(name, fieldnames, defaults=defaults, module=modulename)
def _create_capsule(pointer, name, context, destructor):
    """Recreate a PyCapsule, preferring one already present in this process.

    The dotted *name* is probed the way CPython's capsule import logic does:
    each right-split of the name is tried as ``module.attr...`` until an
    object is found.  If a valid capsule with that name exists, it is
    returned; otherwise a new capsule is created around the pickled
    *pointer* (which may not reference valid memory in this process).
    """
    attr_found = False
    try:
        # based on https://github.com/python/cpython/blob/f4095e53ab708d95e019c909d5928502775ba68f/Objects/capsule.c#L209-L231
        uname = name.decode('utf8')
        for i in range(1, uname.count('.')+1):
            names = uname.rsplit('.', i)
            try:
                module = __import__(names[0])
            except ImportError:
                # this split is not importable: try the next one.  (Do NOT
                # fall through -- previously a stale `module` from an earlier
                # iteration, or an unbound name, would be probed by mistake.)
                continue
            obj = module
            for attr in names[1:]:
                obj = getattr(obj, attr)
            capsule = obj
            attr_found = True
            break
    except Exception:
        pass

    if attr_found:
        if _PyCapsule_IsValid(capsule, name):
            return capsule
        raise UnpicklingError("%s object exists at %s but a PyCapsule object was expected." % (type(capsule), name))
    else:
        #warnings.warn('Creating a new PyCapsule %s for a C data structure that may not be present in memory. Segmentation faults or other memory errors are possible.' % (name,), UnpicklingWarning)
        capsule = _PyCapsule_New(pointer, name, destructor)
        _PyCapsule_SetContext(capsule, context)
        return capsule
1000def _getattr(objclass, name, repr_str):
1001 # hack to grab the reference directly
1002 try: #XXX: works only for __builtin__ ?
1003 attr = repr_str.split("'")[3]
1004 return eval(attr+'.__dict__["'+name+'"]')
1005 except Exception:
1006 try:
1007 attr = objclass.__dict__
1008 if type(attr) is DictProxyType:
1009 attr = attr[name]
1010 else:
1011 attr = getattr(objclass,name)
1012 except (AttributeError, KeyError):
1013 attr = getattr(objclass,name)
1014 return attr
1016def _get_attr(self, name):
1017 # stop recursive pickling
1018 return getattr(self, name, None) or getattr(__builtin__, name)
1020def _import_module(import_name, safe=False):
1021 try:
1022 if import_name.startswith('__runtime__.'):
1023 return sys.modules[import_name]
1024 elif '.' in import_name:
1025 items = import_name.split('.')
1026 module = '.'.join(items[:-1])
1027 obj = items[-1]
1028 submodule = getattr(__import__(module, None, None, [obj]), obj)
1029 if isinstance(submodule, (ModuleType, type)):
1030 return submodule
1031 return __import__(import_name, None, None, [obj])
1032 else:
1033 return __import__(import_name)
1034 except (ImportError, AttributeError, KeyError):
1035 if safe:
1036 return None
1037 raise
1039# https://github.com/python/cpython/blob/a8912a0f8d9eba6d502c37d522221f9933e976db/Lib/pickle.py#L322-L333
1040def _getattribute(obj, name):
1041 for subpath in name.split('.'):
1042 if subpath == '<locals>':
1043 raise AttributeError("Can't get local attribute {!r} on {!r}"
1044 .format(name, obj))
1045 try:
1046 parent = obj
1047 obj = getattr(obj, subpath)
1048 except AttributeError:
1049 raise AttributeError("Can't get attribute {!r} on {!r}"
1050 .format(name, obj))
1051 return obj, parent
1053def _locate_function(obj, pickler=None):
1054 module_name = getattr(obj, '__module__', None)
1055 if module_name in ['__main__', None] or \
1056 pickler and is_dill(pickler, child=False) and pickler._session and module_name == pickler._main.__name__:
1057 return False
1058 if hasattr(obj, '__qualname__'):
1059 module = _import_module(module_name, safe=True)
1060 try:
1061 found, _ = _getattribute(module, obj.__qualname__)
1062 return found is obj
1063 except AttributeError:
1064 return False
1065 else:
1066 found = _import_module(module_name + '.' + obj.__name__, safe=True)
1067 return found is obj
1070def _setitems(dest, source):
1071 for k, v in source.items():
1072 dest[k] = v
def _save_with_postproc(pickler, reduction, is_pickler_dill=None, obj=Getattr.NO_DEFAULT, postproc_list=None):
    # Save *obj* via *reduction*, tracking it in pickler._postproc so that
    # deferred "postprocessing" reductions (late attribute/item assignments
    # used to break reference cycles) are emitted after the object itself.
    if obj is Getattr.NO_DEFAULT:
        obj = Reduce(reduction) # pragma: no cover

    if is_pickler_dill is None:
        is_pickler_dill = is_dill(pickler, child=True)
    if is_pickler_dill:
        # assert id(obj) not in pickler._postproc, str(obj) + ' already pushed on stack!'
        # if not hasattr(pickler, 'x'): pickler.x = 0
        # print(pickler.x*' ', 'push', obj, id(obj), pickler._recurse)
        # pickler.x += 1
        if postproc_list is None:
            postproc_list = []

        # Recursive object not supported. Default to a global instead.
        if id(obj) in pickler._postproc:
            name = '%s.%s ' % (obj.__module__, getattr(obj, '__qualname__', obj.__name__)) if hasattr(obj, '__module__') else ''
            warnings.warn('Cannot pickle %r: %shas recursive self-references that trigger a RecursionError.' % (obj, name), PicklingWarning)
            pickler.save_global(obj)
            return
        # register this object so nested saves can append deferred reductions
        pickler._postproc[id(obj)] = postproc_list

    # TODO: Use state_setter in Python 3.8 to allow for faster cPickle implementations
    pickler.save_reduce(*reduction, obj=obj)

    if is_pickler_dill:
        # pickler.x -= 1
        # print(pickler.x*' ', 'pop', obj, id(obj))
        postproc = pickler._postproc.pop(id(obj))
        # assert postproc_list == postproc, 'Stack tampered!'
        # emit the deferred reductions most-recently-added first
        for reduction in reversed(postproc):
            if reduction[0] is _setitems:
                # use the internal machinery of pickle.py to speedup when
                # updating a dictionary in postproc
                dest, source = reduction[1]
                if source:
                    pickler.write(pickler.get(pickler.memo[id(dest)][0]))
                    pickler._batch_setitems(iter(source.items()))
                else:
                    # Updating with an empty dictionary. Same as doing nothing.
                    continue
            else:
                pickler.save_reduce(*reduction)
            # pop None created by calling preprocessing step off stack
            pickler.write(POP)
1121#@register(CodeType)
1122#def save_code(pickler, obj):
1123# logger.trace(pickler, "Co: %s", obj)
1124# pickler.save_reduce(_unmarshal, (marshal.dumps(obj),), obj=obj)
1125# logger.trace(pickler, "# Co")
1126# return
1128# The following function is based on 'save_codeobject' from 'cloudpickle'
1129# Copyright (c) 2012, Regents of the University of California.
1130# Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
1131# License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE
@register(CodeType)
def save_code(pickler, obj):
    # Pickle a code object by capturing the CodeType constructor arguments.
    # The constructor layout changed across CPython versions (3.8, 3.10,
    # 3.11a, 3.11), so the attribute set present on obj selects the matching
    # _create_code call signature.
    logger.trace(pickler, "Co: %s", obj)
    if hasattr(obj, "co_endlinetable"): # python 3.11a (20 args)
        args = (
            obj.co_lnotab, # for < python 3.10 [not counted in args]
            obj.co_argcount, obj.co_posonlyargcount,
            obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
            obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
            obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname,
            obj.co_firstlineno, obj.co_linetable, obj.co_endlinetable,
            obj.co_columntable, obj.co_exceptiontable, obj.co_freevars,
            obj.co_cellvars
        )
    elif hasattr(obj, "co_exceptiontable"): # python 3.11 (18 args)
        with warnings.catch_warnings():
            if not OLD312a7: # issue 597
                # accessing co_lnotab is deprecated on 3.12a7+
                warnings.filterwarnings('ignore', category=DeprecationWarning)
            args = (
                obj.co_lnotab, # for < python 3.10 [not counted in args]
                obj.co_argcount, obj.co_posonlyargcount,
                obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
                obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
                obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname,
                obj.co_firstlineno, obj.co_linetable, obj.co_exceptiontable,
                obj.co_freevars, obj.co_cellvars
            )
    elif hasattr(obj, "co_linetable"): # python 3.10 (16 args)
        args = (
            obj.co_lnotab, # for < python 3.10 [not counted in args]
            obj.co_argcount, obj.co_posonlyargcount,
            obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
            obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
            obj.co_varnames, obj.co_filename, obj.co_name,
            obj.co_firstlineno, obj.co_linetable, obj.co_freevars,
            obj.co_cellvars
        )
    elif hasattr(obj, "co_posonlyargcount"): # python 3.8 (16 args)
        args = (
            obj.co_argcount, obj.co_posonlyargcount,
            obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
            obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
            obj.co_varnames, obj.co_filename, obj.co_name,
            obj.co_firstlineno, obj.co_lnotab, obj.co_freevars,
            obj.co_cellvars
        )
    else: # python 3.7 (15 args)
        args = (
            obj.co_argcount, obj.co_kwonlyargcount, obj.co_nlocals,
            obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts,
            obj.co_names, obj.co_varnames, obj.co_filename,
            obj.co_name, obj.co_firstlineno, obj.co_lnotab,
            obj.co_freevars, obj.co_cellvars
        )

    pickler.save_reduce(_create_code, args, obj=obj)
    logger.trace(pickler, "# Co")
    return
1191def _repr_dict(obj):
1192 """Make a short string representation of a dictionary."""
1193 return "<%s object at %#012x>" % (type(obj).__name__, id(obj))
@register(dict)
def save_module_dict(pickler, obj):
    # Pickle a dict, special-casing module __dict__s so they are stored as
    # references to their module rather than by value.
    if is_dill(pickler, child=False) and obj == pickler._main.__dict__ and \
            not (pickler._session and pickler._first_pass):
        # D1: the session main module's dict -> raw reference to __main__
        logger.trace(pickler, "D1: %s", _repr_dict(obj)) # obj
        pickler.write(bytes('c__builtin__\n__main__\n', 'UTF-8'))
        logger.trace(pickler, "# D1")
    elif (not is_dill(pickler, child=False)) and (obj == _main_module.__dict__):
        # D3: a non-dill pickler saving the main module's dict
        logger.trace(pickler, "D3: %s", _repr_dict(obj)) # obj
        pickler.write(bytes('c__main__\n__dict__\n', 'UTF-8')) #XXX: works in general?
        logger.trace(pickler, "# D3")
    elif '__name__' in obj and obj != _main_module.__dict__ \
            and type(obj['__name__']) is str \
            and obj is getattr(_import_module(obj['__name__'],True), '__dict__', None):
        # D4: the dict belongs to an importable module -> reference by name
        logger.trace(pickler, "D4: %s", _repr_dict(obj)) # obj
        pickler.write(bytes('c%s\n__dict__\n' % obj['__name__'], 'UTF-8'))
        logger.trace(pickler, "# D4")
    else:
        # D2: an ordinary dict -> save by value
        logger.trace(pickler, "D2: %s", _repr_dict(obj)) # obj
        if is_dill(pickler, child=False) and pickler._session:
            # we only care about session the first pass thru
            pickler._first_pass = False
        StockPickler.save_dict(pickler, obj)
        logger.trace(pickler, "# D2")
    return
if not OLD310 and MAPPING_PROXY_TRICK:
    # 3.10+: dict views expose a .mapping attribute; combined with the
    # mapping-proxy trick we can recover the real backing dict and rebuild
    # the view against it.
    def save_dict_view(dicttype):
        def save_dict_view_for_function(func):
            def _save_dict_view(pickler, obj):
                logger.trace(pickler, "Dkvi: <%s>", obj)
                mapping = obj.mapping | _dictproxy_helper_instance
                pickler.save_reduce(func, (mapping,), obj=obj)
                logger.trace(pickler, "# Dkvi")
            return _save_dict_view
        return [
            (funcname, save_dict_view_for_function(getattr(dicttype, funcname)))
            for funcname in ('keys', 'values', 'items')
        ]
else:
    # The following functions are based on 'cloudpickle'
    # https://github.com/cloudpipe/cloudpickle/blob/5d89947288a18029672596a4d719093cc6d5a412/cloudpickle/cloudpickle.py#L922-L940
    # Copyright (c) 2012, Regents of the University of California.
    # Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
    # License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE
    def save_dict_view(dicttype):
        # Fallback: rebuild each view from a freshly reconstructed dict
        # (no .mapping attribute available here).
        def save_dict_keys(pickler, obj):
            logger.trace(pickler, "Dk: <%s>", obj)
            dict_constructor = _shims.Reduce(dicttype.fromkeys, (list(obj),))
            pickler.save_reduce(dicttype.keys, (dict_constructor,), obj=obj)
            logger.trace(pickler, "# Dk")

        def save_dict_values(pickler, obj):
            logger.trace(pickler, "Dv: <%s>", obj)
            dict_constructor = _shims.Reduce(dicttype, (enumerate(obj),))
            pickler.save_reduce(dicttype.values, (dict_constructor,), obj=obj)
            logger.trace(pickler, "# Dv")

        def save_dict_items(pickler, obj):
            logger.trace(pickler, "Di: <%s>", obj)
            pickler.save_reduce(dicttype.items, (dicttype(obj),), obj=obj)
            logger.trace(pickler, "# Di")

        return (
            ('keys', save_dict_keys),
            ('values', save_dict_values),
            ('items', save_dict_items)
        )
# Register the view savers for the keys/values/items view types of dict and
# OrderedDict, skipping view types that already have a dispatch entry.
for __dicttype in (
    dict,
    OrderedDict
):
    __obj = __dicttype()
    for __funcname, __savefunc in save_dict_view(__dicttype):
        __tview = type(getattr(__obj, __funcname)())
        if __tview not in Pickler.dispatch:
            Pickler.dispatch[__tview] = __savefunc
del __dicttype, __obj, __funcname, __tview, __savefunc
@register(ClassType)
def save_classobj(pickler, obj): #FIXME: enable pickler._byref
    # Pickle a class: by value (name, bases, dict) when it cannot be located
    # in an importable module, otherwise by reference via save_global.
    if not _locate_function(obj, pickler):
        logger.trace(pickler, "C1: %s", obj)
        pickler.save_reduce(ClassType, (obj.__name__, obj.__bases__,
                                        obj.__dict__), obj=obj)
        #XXX: or obj.__dict__.copy()), obj=obj) ?
        logger.trace(pickler, "# C1")
    else:
        logger.trace(pickler, "C2: %s", obj)
        name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
        StockPickler.save_global(pickler, obj, name=name)
        logger.trace(pickler, "# C2")
    return
@register(typing._GenericAlias)
def save_generic_alias(pickler, obj):
    # Pickle a typing generic alias.  __reduce__ returns a plain string for
    # aliases that are importable attributes of the typing module.
    args = obj.__args__
    if type(obj.__reduce__()) is str:
        logger.trace(pickler, "Ga0: %s", obj)
        StockPickler.save_global(pickler, obj, name=obj.__reduce__())
        logger.trace(pickler, "# Ga0")
    elif obj.__origin__ is tuple and (not args or args == ((),)):
        # empty-tuple alias (e.g. Tuple[()]) needs special reconstruction
        logger.trace(pickler, "Ga1: %s", obj)
        pickler.save_reduce(_create_typing_tuple, (args,), obj=obj)
        logger.trace(pickler, "# Ga1")
    else:
        logger.trace(pickler, "Ga2: %s", obj)
        StockPickler.save_reduce(pickler, *obj.__reduce__(), obj=obj)
        logger.trace(pickler, "# Ga2")
    return
@register(LockType)
def save_lock(pickler, obj):
    """Pickle a threading lock by recording whether it is currently held."""
    logger.trace(pickler, "Lo: %s", obj)
    state = (obj.locked(),)
    pickler.save_reduce(_create_lock, state, obj=obj)
    logger.trace(pickler, "# Lo")
@register(RLockType)
def save_rlock(pickler, obj):
    """Pickle an RLock by parsing the recursion count and owner thread out of
    its repr (avoids _release_save, which would actually unlock it)."""
    logger.trace(pickler, "RL: %s", obj)
    rep = obj.__repr__()
    count = int(rep.split('count=')[1].split()[0].rstrip('>'))
    owner = int(rep.split('owner=')[1].split()[0])
    pickler.save_reduce(_create_rlock, (count, owner,), obj=obj)
    logger.trace(pickler, "# RL")
#@register(SocketType) #FIXME: causes multiprocess test_pickling FAIL
def save_socket(pickler, obj):
    # Pickle a socket via the stdlib's reduce_socket helper (handler is
    # currently unregistered -- see the FIXME above).
    logger.trace(pickler, "So: %s", obj)
    pickler.save_reduce(*reduce_socket(obj))
    logger.trace(pickler, "# So")
    return
def _save_file(pickler, obj, open_):
    # Common file-handle pickling: record name, mode, position and closed
    # state, plus the full file contents when the pickler uses FILE_FMODE.
    if obj.closed:
        position = 0
    else:
        obj.flush()
        if obj in (sys.__stdout__, sys.__stderr__, sys.__stdin__):
            # standard streams are matched up by name on unpickling
            position = -1
        else:
            position = obj.tell()
    if is_dill(pickler, child=True) and pickler._fmode == FILE_FMODE:
        # FILE_FMODE: embed the entire file contents in the pickle
        f = open_(obj.name, "r")
        fdata = f.read()
        f.close()
    else:
        fdata = ""
    if is_dill(pickler, child=True):
        strictio = pickler._strictio
        fmode = pickler._fmode
    else:
        # non-dill picklers get the defaults
        strictio = False
        fmode = 0 # HANDLE_FMODE
    pickler.save_reduce(_create_filehandle, (obj.name, obj.mode, position,
                                             obj.closed, open_, strictio,
                                             fmode, fdata), obj=obj)
    return
@register(FileType) #XXX: in 3.x has buffer=0, needs different _create?
@register(BufferedReaderType)
@register(BufferedWriterType)
@register(TextWrapperType)
def save_file(pickler, obj):
    # Pickle C-implemented file handles via _save_file with the builtin open.
    logger.trace(pickler, "Fi: %s", obj)
    f = _save_file(pickler, obj, open)
    logger.trace(pickler, "# Fi")
    return f
if BufferedRandomType:
    # same handler for random-access buffered files, when the type exists
    @register(BufferedRandomType)
    def save_file(pickler, obj):
        logger.trace(pickler, "Fi: %s", obj)
        f = _save_file(pickler, obj, open)
        logger.trace(pickler, "# Fi")
        return f
if PyTextWrapperType:
    # pure-python io types get the pure-python open (_open) instead
    @register(PyBufferedReaderType)
    @register(PyBufferedWriterType)
    @register(PyTextWrapperType)
    def save_file(pickler, obj):
        logger.trace(pickler, "Fi: %s", obj)
        f = _save_file(pickler, obj, _open)
        logger.trace(pickler, "# Fi")
        return f

    if PyBufferedRandomType:
        @register(PyBufferedRandomType)
        def save_file(pickler, obj):
            logger.trace(pickler, "Fi: %s", obj)
            f = _save_file(pickler, obj, _open)
            logger.trace(pickler, "# Fi")
            return f
1397# The following two functions are based on 'saveCStringIoInput'
1398# and 'saveCStringIoOutput' from spickle
1399# Copyright (c) 2011 by science+computing ag
1400# License: http://www.apache.org/licenses/LICENSE-2.0
if InputType:
    @register(InputType)
    def save_stringi(pickler, obj):
        # Pickle an input StringIO-type buffer from its value and position.
        logger.trace(pickler, "Io: %s", obj)
        if obj.closed:
            value = ''; position = 0
        else:
            value = obj.getvalue(); position = obj.tell()
        pickler.save_reduce(_create_stringi, (value, position, \
                                              obj.closed), obj=obj)
        logger.trace(pickler, "# Io")
        return

    @register(OutputType)
    def save_stringo(pickler, obj):
        # Pickle an output StringIO-type buffer from its value and position.
        logger.trace(pickler, "Io: %s", obj)
        if obj.closed:
            value = ''; position = 0
        else:
            value = obj.getvalue(); position = obj.tell()
        pickler.save_reduce(_create_stringo, (value, position, \
                                              obj.closed), obj=obj)
        logger.trace(pickler, "# Io")
        return
if LRUCacheType is not None:
    from functools import lru_cache
    @register(LRUCacheType)
    def save_lru_cache(pickler, obj):
        # Pickle an lru_cache wrapper by re-wrapping its underlying function;
        # only the cache parameters are preserved, not the cached entries.
        logger.trace(pickler, "LRU: %s", obj)
        if OLD39:
            # pre-3.9: recover maxsize from cache_info()
            kwargs = obj.cache_info()
            args = (kwargs.maxsize,)
        else:
            kwargs = obj.cache_parameters()
            args = (kwargs['maxsize'], kwargs['typed'])
        if args != lru_cache.__defaults__:
            wrapper = Reduce(lru_cache, args, is_callable=True)
        else:
            # default parameters: reference lru_cache itself
            wrapper = lru_cache
        pickler.save_reduce(wrapper, (obj.__wrapped__,), obj=obj)
        logger.trace(pickler, "# LRU")
        return
@register(SuperType)
def save_super(pickler, obj):
    # Pickle a bound super object from its defining class and instance.
    logger.trace(pickler, "Su: %s", obj)
    pickler.save_reduce(super, (obj.__thisclass__, obj.__self__), obj=obj)
    logger.trace(pickler, "# Su")
    return
if IS_PYPY:
    @register(MethodType)
    def save_instancemethod0(pickler, obj):
        # PyPy: some builtin methods lack a real code object; those are
        # recovered via getattr on their __self__ instead of MethodType.
        code = getattr(obj.__func__, '__code__', None)
        if code is not None and type(code) is not CodeType \
                and getattr(obj.__self__, obj.__name__) == obj:
            # Some PyPy builtin functions have no module name
            logger.trace(pickler, "Me2: %s", obj)
            # TODO: verify that this works for all PyPy builtin methods
            pickler.save_reduce(getattr, (obj.__self__, obj.__name__), obj=obj)
            logger.trace(pickler, "# Me2")
            return

        logger.trace(pickler, "Me1: %s", obj)
        pickler.save_reduce(MethodType, (obj.__func__, obj.__self__), obj=obj)
        logger.trace(pickler, "# Me1")
        return
else:
    @register(MethodType)
    def save_instancemethod0(pickler, obj):
        # Pickle a bound method from its function and instance.
        logger.trace(pickler, "Me1: %s", obj)
        pickler.save_reduce(MethodType, (obj.__func__, obj.__self__), obj=obj)
        logger.trace(pickler, "# Me1")
        return
if not IS_PYPY:
    @register(MemberDescriptorType)
    @register(GetSetDescriptorType)
    @register(MethodDescriptorType)
    @register(WrapperDescriptorType)
    @register(ClassMethodDescriptorType)
    def save_wrapper_descriptor(pickler, obj):
        # Pickle a descriptor by re-fetching it from its owner class via
        # _getattr (the repr string helps identify the owner).
        logger.trace(pickler, "Wr: %s", obj)
        pickler.save_reduce(_getattr, (obj.__objclass__, obj.__name__,
                                       obj.__repr__()), obj=obj)
        logger.trace(pickler, "# Wr")
        return
else:
    # PyPy exposes fewer descriptor types
    @register(MemberDescriptorType)
    @register(GetSetDescriptorType)
    def save_wrapper_descriptor(pickler, obj):
        logger.trace(pickler, "Wr: %s", obj)
        pickler.save_reduce(_getattr, (obj.__objclass__, obj.__name__,
                                       obj.__repr__()), obj=obj)
        logger.trace(pickler, "# Wr")
        return
@register(CellType)
def save_cell(pickler, obj):
    # Pickle a closure cell; empty cells and cells involved in reference
    # cycles need postprocessing so their contents are set (or deleted)
    # only after the cell itself exists.
    try:
        f = obj.cell_contents
    except ValueError: # cell is empty
        logger.trace(pickler, "Ce3: %s", obj)
        # _shims._CELL_EMPTY is defined in _shims.py to support PyPy 2.7.
        # It unpickles to a sentinel object _dill._CELL_EMPTY, also created in
        # _shims.py. This object is not present in Python 3 because the cell's
        # contents can be deleted in newer versions of Python. The reduce object
        # will instead unpickle to None if unpickled in Python 3.

        # When breaking changes are made to dill, (_shims._CELL_EMPTY,) can
        # be replaced by () OR the delattr function can be removed depending on
        # whichever is more convenient.
        pickler.save_reduce(_create_cell, (_shims._CELL_EMPTY,), obj=obj)
        # Call the function _delattr on the cell's cell_contents attribute
        # The result of this function call will be None
        pickler.save_reduce(_shims._delattr, (obj, 'cell_contents'))
        # pop None created by calling _delattr off stack
        pickler.write(POP)
        logger.trace(pickler, "# Ce3")
        return
    if is_dill(pickler, child=True):
        if id(f) in pickler._postproc:
            # Already seen. Add to its postprocessing.
            postproc = pickler._postproc[id(f)]
        else:
            # Haven't seen it. Add to the highest possible object and set its
            # value as late as possible to prevent cycle.
            postproc = next(iter(pickler._postproc.values()), None)
        if postproc is not None:
            logger.trace(pickler, "Ce2: %s", obj)
            # _CELL_REF is defined in _shims.py to support older versions of
            # dill. When breaking changes are made to dill, (_CELL_REF,) can
            # be replaced by ()
            pickler.save_reduce(_create_cell, (_CELL_REF,), obj=obj)
            postproc.append((_shims._setattr, (obj, 'cell_contents', f)))
            logger.trace(pickler, "# Ce2")
            return
    # simple case: no cycle risk, save the cell with its contents directly
    logger.trace(pickler, "Ce1: %s", obj)
    pickler.save_reduce(_create_cell, (f,), obj=obj)
    logger.trace(pickler, "# Ce1")
    return
if MAPPING_PROXY_TRICK:
    @register(DictProxyType)
    def save_dictproxy(pickler, obj):
        # Recover the proxied mapping itself via the __or__ trick, so the
        # rebuilt proxy wraps the very same dict object.
        logger.trace(pickler, "Mp: %s", _repr_dict(obj)) # obj
        mapping = obj | _dictproxy_helper_instance
        pickler.save_reduce(DictProxyType, (mapping,), obj=obj)
        logger.trace(pickler, "# Mp")
        return
else:
    @register(DictProxyType)
    def save_dictproxy(pickler, obj):
        # Fallback: wrap a shallow copy of the proxied mapping.
        logger.trace(pickler, "Mp: %s", _repr_dict(obj)) # obj
        pickler.save_reduce(DictProxyType, (obj.copy(),), obj=obj)
        logger.trace(pickler, "# Mp")
        return
@register(SliceType)
def save_slice(pickler, obj):
    """Pickle a slice object from its start/stop/step triple."""
    logger.trace(pickler, "Sl: %s", obj)
    bounds = (obj.start, obj.stop, obj.step)
    pickler.save_reduce(slice, bounds, obj=obj)
    logger.trace(pickler, "# Sl")
@register(XRangeType)
@register(EllipsisType)
@register(NotImplementedType)
def save_singleton(pickler, obj):
    # These objects have evaluable reprs (e.g. 'Ellipsis', 'range(0, 3)'),
    # so they can be recreated by evaluating repr(obj).
    logger.trace(pickler, "Si: %s", obj)
    pickler.save_reduce(_eval_repr, (obj.__repr__(),), obj=obj)
    logger.trace(pickler, "# Si")
    return
1576def _proxy_helper(obj): # a dead proxy returns a reference to None
1577 """get memory address of proxy's reference object"""
1578 _repr = repr(obj)
1579 try: _str = str(obj)
1580 except ReferenceError: # it's a dead proxy
1581 return id(None)
1582 if _str == _repr: return id(obj) # it's a repr
1583 try: # either way, it's a proxy from here
1584 address = int(_str.rstrip('>').split(' at ')[-1], base=16)
1585 except ValueError: # special case: proxy of a 'type'
1586 if not IS_PYPY:
1587 address = int(_repr.rstrip('>').split(' at ')[-1], base=16)
1588 else:
1589 objects = iter(gc.get_objects())
1590 for _obj in objects:
1591 if repr(_obj) == _str: return id(_obj)
1592 # all bad below... nothing found so throw ReferenceError
1593 msg = "Cannot reference object for proxy at '%s'" % id(obj)
1594 raise ReferenceError(msg)
1595 return address
1597def _locate_object(address, module=None):
1598 """get object located at the given memory address (inverse of id(obj))"""
1599 special = [None, True, False] #XXX: more...?
1600 for obj in special:
1601 if address == id(obj): return obj
1602 if module:
1603 objects = iter(module.__dict__.values())
1604 else: objects = iter(gc.get_objects())
1605 for obj in objects:
1606 if address == id(obj): return obj
1607 # all bad below... nothing found so throw ReferenceError or TypeError
1608 try: address = hex(address)
1609 except TypeError:
1610 raise TypeError("'%s' is not a valid memory address" % str(address))
1611 raise ReferenceError("Cannot reference object at '%s'" % address)
@register(ReferenceType)
def save_weakref(pickler, obj):
    # Pickle a weakref by pickling its referent (a dead ref pickles None,
    # and _create_weakref rebuilds a dead ref on the other side).
    refobj = obj()
    logger.trace(pickler, "R1: %s", obj)
    #refobj = ctypes.pythonapi.PyWeakref_GetObject(obj) # dead returns "None"
    pickler.save_reduce(_create_weakref, (refobj,), obj=obj)
    logger.trace(pickler, "# R1")
    return
@register(ProxyType)
def save_weakproxy(pickler, obj):
    # Pickle a weakproxy by locating its referent in memory and re-proxying
    # it on unpickle (callable-ness is preserved).
    # Must do string substitution here and use %r to avoid ReferenceError.
    logger.trace(pickler, "R2: %r" % obj)
    refobj = _locate_object(_proxy_helper(obj))
    pickler.save_reduce(_create_weakproxy, (refobj, callable(obj)), obj=obj)
    logger.trace(pickler, "# R2")
    return
1632def _is_builtin_module(module):
1633 if not hasattr(module, "__file__"): return True
1634 if module.__file__ is None: return False
1635 # If a module file name starts with prefix, it should be a builtin
1636 # module, so should always be pickled as a reference.
1637 names = ["base_prefix", "base_exec_prefix", "exec_prefix", "prefix", "real_prefix"]
1638 rp = os.path.realpath
1639 # See https://github.com/uqfoundation/dill/issues/566
1640 return (
1641 any(
1642 module.__file__.startswith(getattr(sys, name))
1643 or rp(module.__file__).startswith(rp(getattr(sys, name)))
1644 for name in names
1645 if hasattr(sys, name)
1646 )
1647 or module.__file__.endswith(EXTENSION_SUFFIXES)
1648 or 'site-packages' in module.__file__
1649 )
1651def _is_imported_module(module):
1652 return getattr(module, '__loader__', None) is not None or module in sys.modules.values()
@register(ModuleType)
def save_module(pickler, obj):
    if False: #_use_diff:
        # dead branch: diff-based module saving, kept disabled for reference
        if obj.__name__.split('.', 1)[0] != "dill":
            try:
                changed = diff.whats_changed(obj, seen=pickler._diff_cache)[0]
            except RuntimeError: # not memorised module, probably part of dill
                pass
            else:
                logger.trace(pickler, "M2: %s with diff", obj)
                logger.info("Diff: %s", changed.keys())
                pickler.save_reduce(_import_module, (obj.__name__,), obj=obj,
                                    state=changed)
                logger.trace(pickler, "# M2")
                return

        logger.trace(pickler, "M1: %s", obj)
        pickler.save_reduce(_import_module, (obj.__name__,), obj=obj)
        logger.trace(pickler, "# M1")
    else:
        builtin_mod = _is_builtin_module(obj)
        is_session_main = is_dill(pickler, child=True) and obj is pickler._main
        if (obj.__name__ not in ("builtins", "dill", "dill._dill") and not builtin_mod
                or is_session_main):
            # M1: save the module by value -- its (filtered) dict is the state
            logger.trace(pickler, "M1: %s", obj)
            # Hack for handling module-type objects in load_module().
            mod_name = obj.__name__ if _is_imported_module(obj) else '__runtime__.%s' % obj.__name__
            # Second references are saved as __builtin__.__main__ in save_module_dict().
            main_dict = obj.__dict__.copy()
            for item in ('__builtins__', '__loader__'):
                main_dict.pop(item, None)
            for item in IPYTHON_SINGLETONS: #pragma: no cover
                if getattr(main_dict.get(item), '__module__', '').startswith('IPython'):
                    del main_dict[item]
            pickler.save_reduce(_import_module, (mod_name,), obj=obj, state=main_dict)
            logger.trace(pickler, "# M1")
        elif obj.__name__ == "dill._dill":
            # this very module: save by reference as dill._dill
            logger.trace(pickler, "M2: %s", obj)
            pickler.save_global(obj, name="_dill")
            logger.trace(pickler, "# M2")
        else:
            # M2: builtin/dill module -- save by reference, re-imported on load
            logger.trace(pickler, "M2: %s", obj)
            pickler.save_reduce(_import_module, (obj.__name__,), obj=obj)
            logger.trace(pickler, "# M2")
    return
1700# The following function is based on '_extract_class_dict' from 'cloudpickle'
1701# Copyright (c) 2012, Regents of the University of California.
1702# Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
1703# License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE
1704def _get_typedict_type(cls, clsdict, attrs, postproc_list):
1705 """Retrieve a copy of the dict of a class without the inherited methods"""
1706 if len(cls.__bases__) == 1:
1707 inherited_dict = cls.__bases__[0].__dict__
1708 else:
1709 inherited_dict = {}
1710 for base in reversed(cls.__bases__):
1711 inherited_dict.update(base.__dict__)
1712 to_remove = []
1713 for name, value in dict.items(clsdict):
1714 try:
1715 base_value = inherited_dict[name]
1716 if value is base_value and hasattr(value, '__qualname__'):
1717 to_remove.append(name)
1718 except KeyError:
1719 pass
1720 for name in to_remove:
1721 dict.pop(clsdict, name)
1723 if issubclass(type(cls), type):
1724 clsdict.pop('__dict__', None)
1725 clsdict.pop('__weakref__', None)
1726 # clsdict.pop('__prepare__', None)
1727 return clsdict, attrs
1729def _get_typedict_abc(obj, _dict, attrs, postproc_list):
1730 if hasattr(abc, '_get_dump'):
1731 (registry, _, _, _) = abc._get_dump(obj)
1732 register = obj.register
1733 postproc_list.extend((register, (reg(),)) for reg in registry)
1734 elif hasattr(obj, '_abc_registry'):
1735 registry = obj._abc_registry
1736 register = obj.register
1737 postproc_list.extend((register, (reg,)) for reg in registry)
1738 else:
1739 raise PicklingError("Cannot find registry of ABC %s", obj)
1741 if '_abc_registry' in _dict:
1742 _dict.pop('_abc_registry', None)
1743 _dict.pop('_abc_cache', None)
1744 _dict.pop('_abc_negative_cache', None)
1745 # _dict.pop('_abc_negative_cache_version', None)
1746 else:
1747 _dict.pop('_abc_impl', None)
1748 return _dict, attrs
@register(TypeType)
def save_type(pickler, obj, postproc_list=None):
    """Pickle a class/type object, dispatching on what kind of type it is.

    Branches (the trace tags mirror the logger.trace calls):
      T1 -- types listed in _typemap: saved by name via _load_type.
      T6 -- namedtuple subclasses: rebuilt via _create_namedtuple.
      T7 -- singleton types NoneType/NotImplementedType/EllipsisType and
            EnumMeta: saved with a direct GLOBAL opcode or type(...) reduce.
      T2 -- types that cannot be located by reference: saved "by value",
            reconstructed from metaclass, name, bases and a pruned __dict__.
      T4 -- everything else: saved by reference via StockPickler.save_global.
    """
    if obj in _typemap:
        logger.trace(pickler, "T1: %s", obj)
        # if obj in _incedental_types:
        #     warnings.warn('Type %r may only exist on this implementation of Python and cannot be unpickled in other implementations.' % (obj,), PicklingWarning)
        pickler.save_reduce(_load_type, (_typemap[obj],), obj=obj)
        logger.trace(pickler, "# T1")
    elif obj.__bases__ == (tuple,) and all([hasattr(obj, attr) for attr in ('_fields','_asdict','_make','_replace')]):
        # special case: namedtuples
        logger.trace(pickler, "T6: %s", obj)
        # preserve a __qualname__ that differs from __name__ via postproc
        obj_name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
        if obj.__name__ != obj_name:
            if postproc_list is None:
                postproc_list = []
            postproc_list.append((setattr, (obj, '__qualname__', obj_name)))
        if not obj._field_defaults:
            _save_with_postproc(pickler, (_create_namedtuple, (obj.__name__, obj._fields, obj.__module__)), obj=obj, postproc_list=postproc_list)
        else:
            # keep defaults in field order so they can be re-applied
            defaults = [obj._field_defaults[field] for field in obj._fields if field in obj._field_defaults]
            _save_with_postproc(pickler, (_create_namedtuple, (obj.__name__, obj._fields, obj.__module__, defaults)), obj=obj, postproc_list=postproc_list)
        logger.trace(pickler, "# T6")
        return

    # special cases: NoneType, NotImplementedType, EllipsisType, EnumMeta
    elif obj is type(None):
        logger.trace(pickler, "T7: %s", obj)
        #XXX: pickler.save_reduce(type, (None,), obj=obj)
        pickler.write(GLOBAL + b'__builtin__\nNoneType\n')
        logger.trace(pickler, "# T7")
    elif obj is NotImplementedType:
        logger.trace(pickler, "T7: %s", obj)
        pickler.save_reduce(type, (NotImplemented,), obj=obj)
        logger.trace(pickler, "# T7")
    elif obj is EllipsisType:
        logger.trace(pickler, "T7: %s", obj)
        pickler.save_reduce(type, (Ellipsis,), obj=obj)
        logger.trace(pickler, "# T7")
    elif obj is EnumMeta:
        logger.trace(pickler, "T7: %s", obj)
        pickler.write(GLOBAL + b'enum\nEnumMeta\n')
        logger.trace(pickler, "# T7")

    else:
        _byref = getattr(pickler, '_byref', None)
        obj_recursive = id(obj) in getattr(pickler, '_postproc', ())
        incorrectly_named = not _locate_function(obj, pickler)
        if not _byref and not obj_recursive and incorrectly_named: # not a function, but the name was held over
            if postproc_list is None:
                postproc_list = []

            # thanks to Tom Stepleton pointing out pickler._session unneeded
            logger.trace(pickler, "T2: %s", obj)
            _dict, attrs = _get_typedict_type(obj, obj.__dict__.copy(), None, postproc_list) # copy dict proxy to a dict

            #print (_dict)
            #print ("%s\n%s" % (type(obj), obj.__name__))
            #print ("%s\n%s" % (obj.__bases__, obj.__dict__))
            # slot names shadow their own descriptors; drop them from the dict
            slots = _dict.get('__slots__', ())
            if type(slots) == str:
                # __slots__ accepts a single string
                slots = (slots,)

            for name in slots:
                _dict.pop(name, None)

            if isinstance(obj, abc.ABCMeta):
                logger.trace(pickler, "ABC: %s", obj)
                _dict, attrs = _get_typedict_abc(obj, _dict, attrs, postproc_list)
                logger.trace(pickler, "# ABC")

            qualname = getattr(obj, '__qualname__', None)
            if attrs is not None:
                for k, v in attrs.items():
                    postproc_list.append((setattr, (obj, k, v)))
            # TODO: Consider using the state argument to save_reduce?
            if qualname is not None:
                postproc_list.append((setattr, (obj, '__qualname__', qualname)))

            if not hasattr(obj, '__orig_bases__'):
                _save_with_postproc(pickler, (_create_type, (
                    type(obj), obj.__name__, obj.__bases__, _dict
                )), obj=obj, postproc_list=postproc_list)
            else:
                # generic classes (PEP 560): rebuild via types.new_class with
                # the original (possibly parameterized) bases
                # This case will always work, but might be overkill.
                _metadict = {
                    'metaclass': type(obj)
                }

                if _dict:
                    _dict_update = PartialType(_setitems, source=_dict)
                else:
                    _dict_update = None

                _save_with_postproc(pickler, (new_class, (
                    obj.__name__, obj.__orig_bases__, _metadict, _dict_update
                )), obj=obj, postproc_list=postproc_list)
            logger.trace(pickler, "# T2")
        else:
            # save by reference; warn about cases that are likely to fail
            obj_name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
            logger.trace(pickler, "T4: %s", obj)
            if incorrectly_named:
                warnings.warn(
                    "Cannot locate reference to %r." % (obj,),
                    PicklingWarning,
                    stacklevel=3,
                )
            if obj_recursive:
                warnings.warn(
                    "Cannot pickle %r: %s.%s has recursive self-references that "
                    "trigger a RecursionError." % (obj, obj.__module__, obj_name),
                    PicklingWarning,
                    stacklevel=3,
                )
            #print (obj.__dict__)
            #print ("%s\n%s" % (type(obj), obj.__name__))
            #print ("%s\n%s" % (obj.__bases__, obj.__dict__))
            StockPickler.save_global(pickler, obj, name=obj_name)
            logger.trace(pickler, "# T4")
    return
@register(property)
@register(abc.abstractproperty)
def save_property(pickler, obj):
    """Reduce a property to its constructor applied to (fget, fset, fdel, doc)."""
    logger.trace(pickler, "Pr: %s", obj)
    accessors = (obj.fget, obj.fset, obj.fdel, obj.__doc__)
    pickler.save_reduce(type(obj), accessors, obj=obj)
    logger.trace(pickler, "# Pr")
@register(staticmethod)
@register(classmethod)
@register(abc.abstractstaticmethod)
@register(abc.abstractclassmethod)
def save_classmethod(pickler, obj):
    """Reduce a static/class method wrapper to type(obj)(wrapped function).

    NOTE(review): the wrapper's own __dict__ (if it has one) is not saved
    here -- confirm whether that state ever needs preserving.
    """
    logger.trace(pickler, "Cm: %s", obj)
    pickler.save_reduce(type(obj), (obj.__func__,), obj=obj)
    logger.trace(pickler, "# Cm")
@register(FunctionType)
def save_function(pickler, obj):
    """Pickle a Python function.

    Branches (trace tags):
      F2 -- function locatable by module/qualname: saved by reference.
      F3 -- PyPy builtin-method wrappers (non-CodeType __code__) that can be
            found on their module: saved as getattr(found, '__func__').
      F1 -- everything else: saved by value from its code object, globals,
            defaults, closure and state.
    """
    if not _locate_function(obj, pickler):
        if type(obj.__code__) is not CodeType:
            # Some PyPy builtin functions have no module name, and thus are not
            # able to be located
            module_name = getattr(obj, '__module__', None)
            if module_name is None:
                module_name = __builtin__.__name__
            module = _import_module(module_name, safe=True)
            _pypy_builtin = False
            try:
                found, _ = _getattribute(module, obj.__qualname__)
                if getattr(found, '__func__', None) is obj:
                    _pypy_builtin = True
            except AttributeError:
                pass

            if _pypy_builtin:
                logger.trace(pickler, "F3: %s", obj)
                pickler.save_reduce(getattr, (found, '__func__'), obj=obj)
                logger.trace(pickler, "# F3")
                return

        logger.trace(pickler, "F1: %s", obj)
        # pickler settings/state used below; getattr so plain picklers work
        _recurse = getattr(pickler, '_recurse', None)
        _postproc = getattr(pickler, '_postproc', None)
        _main_modified = getattr(pickler, '_main_modified', None)
        _original_main = getattr(pickler, '_original_main', __builtin__)#'None'
        postproc_list = []
        if _recurse:
            # recurse to get all globals referred to by obj
            from .detect import globalvars
            globs_copy = globalvars(obj, recurse=True, builtin=True)

            # Add the name of the module to the globs dictionary to prevent
            # the duplication of the dictionary. Pickle the unpopulated
            # globals dictionary and set the remaining items after the function
            # is created to correctly handle recursion.
            globs = {'__name__': obj.__module__}
        else:
            globs_copy = obj.__globals__

            # If the globals is the __dict__ from the module being saved as a
            # session, substitute it by the dictionary being actually saved.
            if _main_modified and globs_copy is _original_main.__dict__:
                globs_copy = getattr(pickler, '_main', _original_main).__dict__
                globs = globs_copy
            # If the globals is a module __dict__, do not save it in the pickle.
            elif globs_copy is not None and obj.__module__ is not None and \
                    getattr(_import_module(obj.__module__, True), '__dict__', None) is globs_copy:
                globs = globs_copy
            else:
                globs = {'__name__': obj.__module__}

        if globs_copy is not None and globs is not globs_copy:
            # In the case that the globals are copied, we need to ensure that
            # the globals dictionary is updated when all objects in the
            # dictionary are already created.
            glob_ids = {id(g) for g in globs_copy.values()}
            for stack_element in _postproc:
                if stack_element in glob_ids:
                    _postproc[stack_element].append((_setitems, (globs, globs_copy)))
                    break
            else:
                postproc_list.append((_setitems, (globs, globs_copy)))

        # collect function attributes that must be restored as state
        closure = obj.__closure__
        state_dict = {}
        for fattrname in ('__doc__', '__kwdefaults__', '__annotations__'):
            fattr = getattr(obj, fattrname, None)
            if fattr is not None:
                state_dict[fattrname] = fattr
        if obj.__qualname__ != obj.__name__:
            state_dict['__qualname__'] = obj.__qualname__
        if '__name__' not in globs or obj.__module__ != globs['__name__']:
            state_dict['__module__'] = obj.__module__

        # non-dict __dict__ must ride along inside state_dict
        state = obj.__dict__
        if type(state) is not dict:
            state_dict['__dict__'] = state
            state = None
        if state_dict:
            state = state, state_dict

        _save_with_postproc(pickler, (_create_function, (
            obj.__code__, globs, obj.__name__, obj.__defaults__,
            closure
        ), state), obj=obj, postproc_list=postproc_list)

        # Lift closure cell update to earliest function (#458)
        if _postproc:
            topmost_postproc = next(iter(_postproc.values()), None)
            if closure and topmost_postproc:
                for cell in closure:
                    possible_postproc = (setattr, (cell, 'cell_contents', obj))
                    try:
                        topmost_postproc.remove(possible_postproc)
                    except ValueError:
                        continue

                    # Change the value of the cell
                    pickler.save_reduce(*possible_postproc)
                    # pop None created by calling preprocessing step off stack
                    pickler.write(POP)

        logger.trace(pickler, "# F1")
    else:
        logger.trace(pickler, "F2: %s", obj)
        name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
        StockPickler.save_global(pickler, obj, name=name)
        logger.trace(pickler, "# F2")
    return
if HAS_CTYPES and hasattr(ctypes, 'pythonapi'):
    # Declare ctypes prototypes for CPython's PyCapsule C-API so capsules
    # can be inspected and rebuilt from Python.  Only available when the
    # interpreter exposes ctypes.pythonapi (i.e. CPython).
    _PyCapsule_New = ctypes.pythonapi.PyCapsule_New
    _PyCapsule_New.argtypes = (ctypes.c_void_p, ctypes.c_char_p, ctypes.c_void_p)
    _PyCapsule_New.restype = ctypes.py_object
    _PyCapsule_GetPointer = ctypes.pythonapi.PyCapsule_GetPointer
    _PyCapsule_GetPointer.argtypes = (ctypes.py_object, ctypes.c_char_p)
    _PyCapsule_GetPointer.restype = ctypes.c_void_p
    _PyCapsule_GetDestructor = ctypes.pythonapi.PyCapsule_GetDestructor
    _PyCapsule_GetDestructor.argtypes = (ctypes.py_object,)
    _PyCapsule_GetDestructor.restype = ctypes.c_void_p
    _PyCapsule_GetContext = ctypes.pythonapi.PyCapsule_GetContext
    _PyCapsule_GetContext.argtypes = (ctypes.py_object,)
    _PyCapsule_GetContext.restype = ctypes.c_void_p
    _PyCapsule_GetName = ctypes.pythonapi.PyCapsule_GetName
    _PyCapsule_GetName.argtypes = (ctypes.py_object,)
    _PyCapsule_GetName.restype = ctypes.c_char_p
    _PyCapsule_IsValid = ctypes.pythonapi.PyCapsule_IsValid
    _PyCapsule_IsValid.argtypes = (ctypes.py_object, ctypes.c_char_p)
    _PyCapsule_IsValid.restype = ctypes.c_bool
    _PyCapsule_SetContext = ctypes.pythonapi.PyCapsule_SetContext
    _PyCapsule_SetContext.argtypes = (ctypes.py_object, ctypes.c_void_p)
    _PyCapsule_SetDestructor = ctypes.pythonapi.PyCapsule_SetDestructor
    _PyCapsule_SetDestructor.argtypes = (ctypes.py_object, ctypes.c_void_p)
    _PyCapsule_SetName = ctypes.pythonapi.PyCapsule_SetName
    _PyCapsule_SetName.argtypes = (ctypes.py_object, ctypes.c_char_p)
    _PyCapsule_SetPointer = ctypes.pythonapi.PyCapsule_SetPointer
    _PyCapsule_SetPointer.argtypes = (ctypes.py_object, ctypes.c_void_p)
    #from _socket import CAPI as _testcapsule
    _testcapsule_name = b'dill._dill._testcapsule'
    # Build a throwaway capsule just to obtain the (otherwise unexposed)
    # PyCapsule type object.
    _testcapsule = _PyCapsule_New(
        ctypes.cast(_PyCapsule_New, ctypes.c_void_p),
        ctypes.c_char_p(_testcapsule_name),
        None
    )
    PyCapsuleType = type(_testcapsule)
    @register(PyCapsuleType)
    def save_capsule(pickler, obj):
        # Pickle a capsule as its raw (pointer, name, context, destructor).
        # NOTE(review): raw pointers are only meaningful within the process
        # that produced them -- see the disabled warning below.
        logger.trace(pickler, "Cap: %s", obj)
        name = _PyCapsule_GetName(obj)
        #warnings.warn('Pickling a PyCapsule (%s) does not pickle any C data structures and could cause segmentation faults or other memory errors when unpickling.' % (name,), PicklingWarning)
        pointer = _PyCapsule_GetPointer(obj, name)
        context = _PyCapsule_GetContext(obj)
        destructor = _PyCapsule_GetDestructor(obj)
        pickler.save_reduce(_create_capsule, (pointer, name, context, destructor), obj=obj)
        logger.trace(pickler, "# Cap")
    # capsules are implementation-specific: record in the incidental maps
    _incedental_reverse_typemap['PyCapsuleType'] = PyCapsuleType
    _reverse_typemap['PyCapsuleType'] = PyCapsuleType
    _incedental_types.add(PyCapsuleType)
else:
    _testcapsule = None
2066#############################
2067# A quick fix for issue #500
2068# This should be removed when a better solution is found.
# Pickle the dataclasses sentinel singletons by reference (a raw GLOBAL
# opcode) so that identity checks against them still hold after unpickling.
if hasattr(dataclasses, "_HAS_DEFAULT_FACTORY_CLASS"):
    @register(dataclasses._HAS_DEFAULT_FACTORY_CLASS)
    def save_dataclasses_HAS_DEFAULT_FACTORY_CLASS(pickler, obj):
        # obj is the _HAS_DEFAULT_FACTORY singleton instance
        logger.trace(pickler, "DcHDF: %s", obj)
        pickler.write(GLOBAL + b"dataclasses\n_HAS_DEFAULT_FACTORY\n")
        logger.trace(pickler, "# DcHDF")

if hasattr(dataclasses, "MISSING"):
    @register(type(dataclasses.MISSING))
    def save_dataclasses_MISSING_TYPE(pickler, obj):
        logger.trace(pickler, "DcM: %s", obj)
        pickler.write(GLOBAL + b"dataclasses\nMISSING\n")
        logger.trace(pickler, "# DcM")

if hasattr(dataclasses, "KW_ONLY"):
    @register(type(dataclasses.KW_ONLY))
    def save_dataclasses_KW_ONLY_TYPE(pickler, obj):
        logger.trace(pickler, "DcKWO: %s", obj)
        pickler.write(GLOBAL + b"dataclasses\nKW_ONLY\n")
        logger.trace(pickler, "# DcKWO")

if hasattr(dataclasses, "_FIELD_BASE"):
    @register(dataclasses._FIELD_BASE)
    def save_dataclasses_FIELD_BASE(pickler, obj):
        # obj.name is the module-level attribute name (e.g. '_FIELD')
        logger.trace(pickler, "DcFB: %s", obj)
        pickler.write(GLOBAL + b"dataclasses\n" + obj.name.encode() + b"\n")
        logger.trace(pickler, "# DcFB")
2098#############################
2100# quick sanity checking
def pickles(obj,exact=False,safe=False,**kwds):
    """
    Quick check if object pickles with dill.

    If *exact=True* then an equality test is done to check if the reconstructed
    object matches the original object.

    If *safe=True* then any exception will raised in copy signal that the
    object is not picklable, otherwise only pickling errors will be trapped.

    Additional keyword arguments are as :func:`dumps` and :func:`loads`.
    """
    if safe: exceptions = (Exception,) # RuntimeError, ValueError
    else:
        exceptions = (TypeError, AssertionError, NotImplementedError, PicklingError, UnpicklingError)
    try:
        pik = copy(obj, **kwds)
        #FIXME: should check types match first, then check content if "exact"
        try:
            #FIXME: should be "(pik == obj).all()" for numpy comparison, though that'll fail if shapes differ
            result = bool(pik.all() == obj.all())
        except (AttributeError, TypeError):
            # BUG FIX: previously mutated the global filter list in place
            # (warnings.filterwarnings('ignore') then `del warnings.filters[0]`),
            # which leaked the filter if the comparison raised and could delete
            # an unrelated entry; catch_warnings restores filters on exit.
            with warnings.catch_warnings(): #FIXME: be specific
                warnings.simplefilter('ignore')
                result = pik == obj
        if hasattr(result, 'toarray'): # for unusual types like sparse matrix
            result = result.toarray().all()
        if result: return True
        if not exact:
            # loose match: the same concrete type is good enough
            result = type(pik) == type(obj)
            if result: return result
            # class instances might have been dumped with byref=False
            return repr(type(pik)) == repr(type(obj)) #XXX: InstanceType?
        return False
    except exceptions:
        return False
def check(obj, *args, **kwds):
    """
    Check pickling of an object across another process.

    *python* is the path to the python interpreter (defaults to sys.executable)

    Set *verbose=True* to print the unpickled object in the other process.

    Additional keyword arguments are as :func:`dumps` and :func:`loads`.
    """
    # == undocumented ==
    # python -- the string path or executable name of the selected python
    # verbose -- if True, be verbose about printing warning messages
    # all other args and kwds are passed to dill.dumps #FIXME: ignore on load
    verbose = kwds.pop('verbose', False)
    python = kwds.pop('python', None)
    if python is None:
        import sys
        python = sys.executable
    # NOTE(review): this isinstance result is discarded -- it neither
    # validates nor converts; kept only for interface stability
    isinstance(python, str)
    import subprocess
    fail = True
    try:
        _obj = dumps(obj, *args, **kwds)
        fail = False
    finally:
        if fail and verbose:
            print("DUMP FAILED")
    # BUG FIX: build argv as a list so an interpreter path containing spaces
    # works; the old `"...".split(None, 2)` form broke on such paths.
    # TODO: also process an 'ignore' keyword on load, e.g.:
    #   unpickle = "dill.loads(%s, ignore=%s)" % (repr(_obj), repr(ignore))
    cmd = [python, "-c", "import dill; print(dill.loads(%s))" % repr(_obj)]
    msg = "SUCCESS" if not subprocess.call(cmd) else "LOAD FAILED"
    if verbose:
        print(msg)
    return
2179# use to protect against missing attributes
def is_dill(pickler, child=None):
    """Return whether *pickler* is (or derives from) a dill pickler.

    With ``child=False``, only the module name of the pickler's class is
    inspected; otherwise membership of Pickler in the class MRO decides.
    """
    if child is not False and hasattr(pickler.__class__, 'mro'):
        return Pickler in pickler.__class__.mro()
    return 'dill' in pickler.__module__
def _extend():
    """extend pickle with all of dill's registered types"""
    # need to have pickle not choke on _main_module?  use is_dill(pickler)
    for t,func in Pickler.dispatch.items():
        try:
            StockPickler.dispatch[t] = func
        except Exception: #TypeError, PicklingError, UnpicklingError
            # BUG FIX: this handler referenced `pickler`, which is undefined
            # in this scope, so any failure raised NameError instead of being
            # logged.  Pass None instead -- presumably logger.trace falls back
            # to a plain log record for a non-pickler first argument (TODO:
            # confirm against dill.logging.TraceAdapter.trace).
            logger.trace(None, "skip: %s", t)
    return
# remove the module-diff helper names from this module's namespace
del diff, _use_diff, use_diff
2198# EOF