Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/dill/_dill.py: 49%
Shortcuts on this page
r m x toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
Shortcuts on this page
r m x toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
1# -*- coding: utf-8 -*-
2#
3# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4# Copyright (c) 2008-2015 California Institute of Technology.
5# Copyright (c) 2016-2024 The Uncertainty Quantification Foundation.
6# License: 3-clause BSD. The full license text is available at:
7# - https://github.com/uqfoundation/dill/blob/master/LICENSE
8"""
9dill: a utility for serialization of python objects
11The primary functions in `dill` are :func:`dump` and
12:func:`dumps` for serialization ("pickling") to a
13file or to a string, respectively, and :func:`load`
14and :func:`loads` for deserialization ("unpickling"),
15similarly, from a file or from a string. Other notable
16functions are :func:`~dill.dump_module` and
17:func:`~dill.load_module`, which are used to save and
18restore module objects, including an intepreter session.
20Based on code written by Oren Tirosh and Armin Ronacher.
21Extended to a (near) full set of the builtin types (in types module),
22and coded to the pickle interface, by <mmckerns@caltech.edu>.
23Initial port to python3 by Jonathan Dobson, continued by mmckerns.
24Tested against "all" python types (Std. Lib. CH 1-15 @ 2.7) by mmckerns.
25Tested against CH16+ Std. Lib. ... TBD.
26"""
28from __future__ import annotations
30__all__ = [
31 'dump','dumps','load','loads','copy',
32 'Pickler','Unpickler','register','pickle','pickles','check',
33 'DEFAULT_PROTOCOL','HIGHEST_PROTOCOL','HANDLE_FMODE','CONTENTS_FMODE','FILE_FMODE',
34 'PickleError','PickleWarning','PicklingError','PicklingWarning','UnpicklingError',
35 'UnpicklingWarning',
36]
38__module__ = 'dill'
40import warnings
41from .logger import adapter as logger
42from .logger import trace as _trace
43log = logger # backward compatibility (see issue #582)
45import os
46import sys
diff = None       # lazily bound to the dill.diff module by use_diff()
_use_diff = False # whether pickling uses the 'diff' mechanism
# interpreter version flags (hexversion compares work for alphas/betas too)
OLD38 = (sys.hexversion < 0x3080000)    # running on Python < 3.8
OLD39 = (sys.hexversion < 0x3090000)    # running on Python < 3.9
OLD310 = (sys.hexversion < 0x30a0000)   # running on Python < 3.10
OLD312a7 = (sys.hexversion < 0x30c00a7) # running on Python < 3.12a7
53#XXX: get types from .objtypes ?
54import builtins as __builtin__
55from pickle import _Pickler as StockPickler, Unpickler as StockUnpickler
56from pickle import GLOBAL, POP
57from _thread import LockType
58from _thread import RLock as RLockType
59try:
60 from _thread import _ExceptHookArgs as ExceptHookArgsType
61except ImportError:
62 ExceptHookArgsType = None
63try:
64 from _thread import _ThreadHandle as ThreadHandleType
65except ImportError:
66 ThreadHandleType = None
67#from io import IOBase
68from types import CodeType, FunctionType, MethodType, GeneratorType, \
69 TracebackType, FrameType, ModuleType, BuiltinMethodType
# aliases for builtin types, named after their (Python 2 era) type names
BufferType = memoryview #XXX: unregistered
ClassType = type # no 'old-style' classes
EllipsisType = type(Ellipsis)
#FileType = IOBase
NotImplementedType = type(NotImplemented)
SliceType = slice
TypeType = type # 'new-style' classes #XXX: unregistered
XRangeType = range
78from types import MappingProxyType as DictProxyType, new_class
79from pickle import DEFAULT_PROTOCOL, HIGHEST_PROTOCOL, PickleError, PicklingError, UnpicklingError
80import __main__ as _main_module
81import marshal
82import gc
83# import zlib
84import abc
85import dataclasses
86from weakref import ReferenceType, ProxyType, CallableProxyType
87from collections import OrderedDict
88from enum import Enum, EnumMeta
89from functools import partial
90from operator import itemgetter, attrgetter
GENERATOR_FAIL = False  # if True, refuse to pickle generators (raise instead)
import importlib.machinery
EXTENSION_SUFFIXES = tuple(importlib.machinery.EXTENSION_SUFFIXES)
try:
    import ctypes
    HAS_CTYPES = True
    # if using `pypy`, pythonapi is not found
    IS_PYPY = not hasattr(ctypes, 'pythonapi')
except ImportError:
    HAS_CTYPES = False
    IS_PYPY = False
# numpy type markers: None when numpy is absent; True when an installation is
# detected (the actual types are only imported lazily, by __hook__)
NumpyUfuncType = None
NumpyDType = None
NumpyArrayType = None
try:
    # probe for numpy without importing it (the import is deferred to __hook__)
    if not importlib.machinery.PathFinder().find_spec('numpy'):
        raise ImportError("No module named 'numpy'")
    NumpyUfuncType = True
    NumpyDType = True
    NumpyArrayType = True
except ImportError:
    pass
113def __hook__():
114 global NumpyArrayType, NumpyDType, NumpyUfuncType
115 from numpy import ufunc as NumpyUfuncType
116 from numpy import ndarray as NumpyArrayType
117 from numpy import dtype as NumpyDType
118 return True
if NumpyArrayType: # then has numpy
    def ndarraysubclassinstance(obj_type):
        """check if obj_type is numpy.ndarray (or a subclass) that still uses
        the stock ndarray reduce methods (a custom __reduce__ is respected)"""
        if all((c.__module__, c.__name__) != ('numpy', 'ndarray') for c in obj_type.__mro__):
            return False
        # anything below here is a numpy array (or subclass) instance
        __hook__() # import numpy (so the following works!!!)
        # verify that __reduce__ has not been overridden
        if obj_type.__reduce_ex__ is not NumpyArrayType.__reduce_ex__ \
                or obj_type.__reduce__ is not NumpyArrayType.__reduce__:
            return False
        return True
    def numpyufunc(obj_type):
        """check if obj_type is numpy.ufunc (or has it in its mro)"""
        return any((c.__module__, c.__name__) == ('numpy', 'ufunc') for c in obj_type.__mro__)
    def numpydtype(obj_type):
        """check if obj_type is a numpy dtype type"""
        if all((c.__module__, c.__name__) != ('numpy', 'dtype') for c in obj_type.__mro__):
            return False
        # anything below here is a numpy dtype
        __hook__() # import numpy (so the following works!!!)
        return obj_type is type(NumpyDType) # handles subclasses
else:
    # numpy is unavailable: all predicates are trivially False
    def ndarraysubclassinstance(obj): return False
    def numpyufunc(obj): return False
    def numpydtype(obj): return False
143from types import GetSetDescriptorType, ClassMethodDescriptorType, \
144 WrapperDescriptorType, MethodDescriptorType, MemberDescriptorType, \
145 MethodWrapperType #XXX: unused
# make sure to add these 'hand-built' types to _typemap
CellType = type((lambda x: lambda y: x)(0).__closure__[0])  # closure cell
PartialType = type(partial(int, base=2))                    # functools.partial
SuperType = type(super(Exception, TypeError()))             # super
ItemGetterType = type(itemgetter(0))                        # operator.itemgetter
AttrGetterType = type(attrgetter('__repr__'))               # operator.attrgetter

try:
    from functools import _lru_cache_wrapper as LRUCacheType
except ImportError:
    LRUCacheType = None

# if the lru_cache wrapper is not a class (e.g. a plain function on some
# implementations), there is nothing to register, so disable it
if not isinstance(LRUCacheType, type):
    LRUCacheType = None
def get_file_type(*args, **kwargs):
    """Open os.devnull with the given arguments and return the handle's type.

    An ``open`` callable may be supplied via keyword (defaults to the
    builtin ``open``) to probe alternative io implementations (e.g. _pyio).
    """
    opener = kwargs.pop("open", __builtin__.open)
    handle = opener(os.devnull, *args, **kwargs)
    try:
        return type(handle)
    finally:
        handle.close()
IS_PYODIDE = sys.platform == 'emscripten'

# concrete file-handle types, discovered by opening os.devnull in each mode
FileType = get_file_type('rb', buffering=0)
TextWrapperType = get_file_type('r', buffering=-1)
BufferedRandomType = None if IS_PYODIDE else get_file_type('r+b', buffering=-1)
BufferedReaderType = get_file_type('rb', buffering=-1)
BufferedWriterType = get_file_type('wb', buffering=-1)
try:
    # pure-python io counterparts of the types above
    from _pyio import open as _open
    PyTextWrapperType = get_file_type('r', buffering=-1, open=_open)
    PyBufferedRandomType = None if IS_PYODIDE else get_file_type('r+b', buffering=-1, open=_open)
    PyBufferedReaderType = get_file_type('rb', buffering=-1, open=_open)
    PyBufferedWriterType = get_file_type('wb', buffering=-1, open=_open)
except ImportError:
    PyTextWrapperType = PyBufferedRandomType = PyBufferedReaderType = PyBufferedWriterType = None
from io import BytesIO as StringIO
InputType = OutputType = None
from socket import socket as SocketType
#FIXME: additionally calls ForkingPickler.register several times
from multiprocessing.reduction import _reduce_socket as reduce_socket
try: #pragma: no cover
    # __IPYTHON__ is injected into builtins by a running IPython shell
    IS_IPYTHON = __IPYTHON__ # is True
    ExitType = None # IPython.core.autocall.ExitAutocall
    IPYTHON_SINGLETONS = ('exit', 'quit', 'get_ipython')
except NameError:
    IS_IPYTHON = False
    try: ExitType = type(exit) # apparently 'exit' can be removed
    except NameError: ExitType = None
    IPYTHON_SINGLETONS = ()

import inspect
import typing
203### Shims for different versions of Python and dill
class Sentinel(object):
    """
    Create a unique sentinel object that is pickled as a constant.
    """
    def __init__(self, name, module_name=None):
        self.name = name
        if module_name is not None:
            self.__module__ = module_name # pragma: no cover
        else:
            # default to the module of the caller
            caller = inspect.currentframe().f_back
            self.__module__ = caller.f_globals['__name__']
    def __repr__(self):
        return self.__module__ + '.' + self.name # pragma: no cover
    def __copy__(self):
        # sentinels are singletons: copying returns the same object
        return self # pragma: no cover
    def __deepcopy__(self, memo):
        return self # pragma: no cover
    def __reduce__(self):
        # returning a bare string makes pickle store this as a named constant
        return self.name
    def __reduce_ex__(self, protocol):
        return self.name
from . import _shims
from ._shims import Reduce, Getattr

### File modes
# the three strategies for pickling an open file handle (see _create_filehandle)
#: Pickles the file handle, preserving mode. The position of the unpickled
#: object is as for a new file handle.
HANDLE_FMODE = 0
#: Pickles the file contents, creating a new file if on load the file does
#: not exist. The position = min(pickled position, EOF) and mode is chosen
#: as such that "best" preserves behavior of the original file.
CONTENTS_FMODE = 1
#: Pickles the entire file (handle and contents), preserving mode and position.
FILE_FMODE = 2
240### Shorthands (modified from python2.5/lib/pickle.py)
def copy(obj, *args, **kwds):
    """
    Use pickling to 'copy' an object (i.e. `loads(dumps(obj))`).

    See :func:`dumps` and :func:`loads` for keyword arguments.
    """
    ignore = kwds.pop('ignore', Unpickler.settings['ignore'])
    pickled = dumps(obj, *args, **kwds)
    return loads(pickled, ignore=ignore)
def dump(obj, file, protocol=None, byref=None, fmode=None, recurse=None, **kwds):#, strictio=None):
    """
    Pickle an object to a file.

    See :func:`dumps` for keyword arguments.
    """
    from .settings import settings
    if protocol is None:
        protocol = settings['protocol']
    else:
        protocol = int(protocol)
    # fold the dill-specific options into the Pickler keywords
    pickler_kwds = dict(kwds, byref=byref, fmode=fmode, recurse=recurse)
    Pickler(file, protocol, **pickler_kwds).dump(obj)
def dumps(obj, protocol=None, byref=None, fmode=None, recurse=None, **kwds):#, strictio=None):
    """
    Pickle an object to a string.

    *protocol* is the pickler protocol, as defined for Python *pickle*.

    If *byref=True*, then dill behaves a lot more like pickle as certain
    objects (like modules) are pickled by reference as opposed to attempting
    to pickle the object itself.

    If *recurse=True*, then objects referred to in the global dictionary
    are recursively traced and pickled, instead of the default behavior
    of attempting to store the entire global dictionary. This is needed for
    functions defined via *exec()*.

    *fmode* (:const:`HANDLE_FMODE`, :const:`CONTENTS_FMODE`,
    or :const:`FILE_FMODE`) indicates how file handles will be pickled.
    For example, when pickling a data file handle for transfer to a remote
    compute service, *FILE_FMODE* will include the file contents in the
    pickle and cursor position so that a remote method can operate
    transparently on an object with an open file handle.

    Default values for keyword arguments can be set in :mod:`dill.settings`.
    """
    buffer = StringIO()
    dump(obj, buffer, protocol, byref, fmode, recurse, **kwds)#, strictio)
    return buffer.getvalue()
def load(file, ignore=None, **kwds):
    """
    Unpickle an object from a file.

    See :func:`loads` for keyword arguments.
    """
    unpickler = Unpickler(file, ignore=ignore, **kwds)
    return unpickler.load()
def loads(str, ignore=None, **kwds):
    """
    Unpickle an object from a string.

    If *ignore=False* then objects whose class is defined in the module
    *__main__* are updated to reference the existing class in *__main__*,
    otherwise they are left to refer to the reconstructed type, which may
    be different.

    Default values for keyword arguments can be set in :mod:`dill.settings`.
    """
    # NOTE: parameter is named `str` (shadows the builtin) for backward
    # compatibility with existing keyword callers
    buffer = StringIO(str)
    return load(buffer, ignore, **kwds)
313# def dumpzs(obj, protocol=None):
314# """pickle an object to a compressed string"""
315# return zlib.compress(dumps(obj, protocol))
317# def loadzs(str):
318# """unpickle an object from a compressed string"""
319# return loads(zlib.decompress(str))
321### End: Shorthands ###
class MetaCatchingDict(dict):
    """A dict that falls back to the `save_type` serializer for metaclass keys.

    Used as the Pickler dispatch table: looking up an unregistered key that
    is itself a subclass of `type` (i.e. a metaclass) yields `save_type`.
    NOTE: keys are expected to be classes; `issubclass` raises TypeError
    for non-class keys.
    """
    def get(self, key, default=None):
        # unlike dict.get, route through __getitem__ so that __missing__
        # can supply save_type for metaclasses
        try:
            return self[key]
        except KeyError:
            return default

    def __missing__(self, key):
        if issubclass(key, type):
            return save_type
        else:
            # include the key so lookup failures are diagnosable
            raise KeyError(key)
class PickleWarning(Warning, PickleError):
    # base warning that is also a PickleError, so it can be warned or raised
    pass

class PicklingWarning(PickleWarning, PicklingError):
    # non-fatal issue encountered while pickling
    pass

class UnpicklingWarning(PickleWarning, UnpicklingError):
    # non-fatal issue encountered while unpickling
    pass
345### Extend the Picklers
class Pickler(StockPickler):
    """python's Pickler extended to interpreter sessions"""
    dispatch: typing.Dict[type, typing.Callable[[Pickler, typing.Any], None]] \
        = MetaCatchingDict(StockPickler.dispatch.copy())
    """The dispatch table, a dictionary of serializing functions used
    by Pickler to save objects of specific types. Use :func:`pickle`
    or :func:`register` to associate types to custom functions.

    :meta hide-value:
    """
    _session = False  # session-pickling flag; toggled elsewhere in dill (not in this view)
    from .settings import settings

    def __init__(self, file, *args, **kwds):
        # pop dill-specific options before delegating to the stock pickler;
        # None means "use the value from dill.settings"
        settings = Pickler.settings
        _byref = kwds.pop('byref', None)
        #_strictio = kwds.pop('strictio', None)
        _fmode = kwds.pop('fmode', None)
        _recurse = kwds.pop('recurse', None)
        StockPickler.__init__(self, file, *args, **kwds)
        self._main = _main_module
        self._diff_cache = {}
        self._byref = settings['byref'] if _byref is None else _byref
        self._strictio = False #_strictio
        self._fmode = settings['fmode'] if _fmode is None else _fmode
        self._recurse = settings['recurse'] if _recurse is None else _recurse
        self._postproc = OrderedDict()
        self._file = file

    def save(self, obj, save_persistent_id=True):
        # numpy hack: register numpy-specific savers lazily, on first
        # encounter, so numpy itself is only imported if actually needed
        obj_type = type(obj)
        if NumpyArrayType and not (obj_type is type or obj_type in Pickler.dispatch):
            # register if the object is a numpy ufunc
            # thanks to Paul Kienzle for pointing out ufuncs didn't pickle
            if numpyufunc(obj_type):
                @register(obj_type)
                def save_numpy_ufunc(pickler, obj):
                    logger.trace(pickler, "Nu: %s", obj)
                    name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
                    StockPickler.save_global(pickler, obj, name=name)
                    logger.trace(pickler, "# Nu")
                    return
                # NOTE: the above 'save' performs like:
                #   import copy_reg
                #   def udump(f): return f.__name__
                #   def uload(name): return getattr(numpy, name)
                #   copy_reg.pickle(NumpyUfuncType, udump, uload)
            # register if the object is a numpy dtype
            if numpydtype(obj_type):
                @register(obj_type)
                def save_numpy_dtype(pickler, obj):
                    logger.trace(pickler, "Dt: %s", obj)
                    pickler.save_reduce(_create_dtypemeta, (obj.type,), obj=obj)
                    logger.trace(pickler, "# Dt")
                    return
                # NOTE: the above 'save' performs like:
                #   import copy_reg
                #   def uload(name): return type(NumpyDType(name))
                #   def udump(f): return uload, (f.type,)
                #   copy_reg.pickle(NumpyDTypeType, udump, uload)
            # register if the object is a subclassed numpy array instance
            if ndarraysubclassinstance(obj_type):
                @register(obj_type)
                def save_numpy_array(pickler, obj):
                    logger.trace(pickler, "Nu: (%s, %s)", obj.shape, obj.dtype)
                    npdict = getattr(obj, '__dict__', None)
                    f, args, state = obj.__reduce__()
                    pickler.save_reduce(_create_array, (f,args,state,npdict), obj=obj)
                    logger.trace(pickler, "# Nu")
                    return
        # end numpy hack

        if GENERATOR_FAIL and obj_type is GeneratorType:
            msg = "Can't pickle %s: attribute lookup builtins.generator failed" % GeneratorType
            raise PicklingError(msg)
        StockPickler.save(self, obj, save_persistent_id)
    save.__doc__ = StockPickler.save.__doc__

    def dump(self, obj): #NOTE: if settings change, need to update attributes
        logger.trace_setup(self)
        StockPickler.dump(self, obj)
    dump.__doc__ = StockPickler.dump.__doc__
class Unpickler(StockUnpickler):
    """python's Unpickler extended to interpreter sessions and more types"""
    from .settings import settings
    _session = False  # session-unpickling flag; toggled elsewhere in dill

    def find_class(self, module, name):
        # remap historical/special names before delegating to the stock lookup
        if (module, name) == ('__builtin__', '__main__'):
            return self._main.__dict__ #XXX: above set w/save_module_dict
        elif (module, name) == ('__builtin__', 'NoneType'):
            return type(None) #XXX: special case: NoneType missing
        if module == 'dill.dill': module = 'dill._dill'
        return StockUnpickler.find_class(self, module, name)

    def __init__(self, *args, **kwds):
        # None for 'ignore' means "use the value from dill.settings"
        settings = Pickler.settings
        _ignore = kwds.pop('ignore', None)
        StockUnpickler.__init__(self, *args, **kwds)
        self._main = _main_module
        self._ignore = settings['ignore'] if _ignore is None else _ignore

    def load(self): #NOTE: if settings change, need to update attributes
        obj = StockUnpickler.load(self)
        if type(obj).__module__ == getattr(_main_module, '__name__', '__main__'):
            if not self._ignore:
                # point obj class to main
                try: obj.__class__ = getattr(self._main, type(obj).__name__)
                except (AttributeError,TypeError): pass # defined in a file
           #_main_module.__dict__.update(obj.__dict__) #XXX: should update globals ?
        return obj
    load.__doc__ = StockUnpickler.load.__doc__
'''
def dispatch_table():
    """get the dispatch table of registered types"""
    return Pickler.dispatch
'''

# snapshot of the stock pickle dispatch table, taken before dill registers
# its own savers; used by _revert_extension() to restore the originals
pickle_dispatch_copy = StockPickler.dispatch.copy()
def pickle(t, func):
    """expose :attr:`~Pickler.dispatch` table for user-created extensions"""
    # bind func as the serializer for type t
    Pickler.dispatch[t] = func
    return
def register(t):
    """decorator to register types to Pickler's :attr:`~Pickler.dispatch` table"""
    def proxy(func):
        # register func as the serializer for type t; return func unchanged
        # so it remains usable as a decorator
        Pickler.dispatch[t] = func
        return func
    return proxy
483def _revert_extension():
484 """drop dill-registered types from pickle's dispatch table"""
485 for type, func in list(StockPickler.dispatch.items()):
486 if func.__module__ == __name__:
487 del StockPickler.dispatch[type]
488 if type in pickle_dispatch_copy:
489 StockPickler.dispatch[type] = pickle_dispatch_copy[type]
def use_diff(on=True):
    """
    Reduces size of pickles by only including object which have changed.

    Decreases pickle size but increases CPU time needed.
    Also helps avoid some unpickleable objects.
    MUST be called at start of script, otherwise changes will not be recorded.
    """
    global _use_diff, diff
    _use_diff = on
    # the diff module is only loaded the first time diffing is enabled
    if not (_use_diff and diff is None):
        return
    try:
        from . import diff as d
    except ImportError:
        import diff as d
    diff = d
508def _create_typemap():
509 import types
510 d = dict(list(__builtin__.__dict__.items()) + \
511 list(types.__dict__.items())).items()
512 for key, value in d:
513 if getattr(value, '__module__', None) == 'builtins' \
514 and type(value) is type:
515 yield key, value
516 return
# map dill's type names to the actual types (inverse of _typemap below)
_reverse_typemap = dict(_create_typemap())
_reverse_typemap.update({
    'PartialType': PartialType,
    'SuperType': SuperType,
    'ItemGetterType': ItemGetterType,
    'AttrGetterType': AttrGetterType,
})
if sys.hexversion < 0x30800a2:
    # CellType was only added to the types module in 3.8.0a2
    _reverse_typemap.update({
        'CellType': CellType,
    })
# "Incidental" implementation specific types. Unpickling these types in another
# implementation of Python (PyPy -> CPython) is not guaranteed to work

# This dictionary should contain all types that appear in Python implementations
# but are not defined in https://docs.python.org/3/library/types.html#standard-interpreter-types
x=OrderedDict()  # throwaway instance, used only to harvest the odict view types
_incedental_reverse_typemap = {
    'FileType': FileType,
    'BufferedRandomType': BufferedRandomType,
    'BufferedReaderType': BufferedReaderType,
    'BufferedWriterType': BufferedWriterType,
    'TextWrapperType': TextWrapperType,
    'PyBufferedRandomType': PyBufferedRandomType,
    'PyBufferedReaderType': PyBufferedReaderType,
    'PyBufferedWriterType': PyBufferedWriterType,
    'PyTextWrapperType': PyTextWrapperType,
}

# dict and OrderedDict view types
_incedental_reverse_typemap.update({
    "DictKeysType": type({}.keys()),
    "DictValuesType": type({}.values()),
    "DictItemsType": type({}.items()),

    "OdictKeysType": type(x.keys()),
    "OdictValuesType": type(x.values()),
    "OdictItemsType": type(x.items()),
})

if ExitType:
    _incedental_reverse_typemap['ExitType'] = ExitType
if InputType:
    _incedental_reverse_typemap['InputType'] = InputType
    _incedental_reverse_typemap['OutputType'] = OutputType

'''
try:
    import symtable
    _incedental_reverse_typemap["SymtableEntryType"] = type(symtable.symtable("", "string", "exec")._table)
except: #FIXME: fails to pickle
    pass

if sys.hexversion >= 0x30a00a0:
    _incedental_reverse_typemap['LineIteratorType'] = type(compile('3', '', 'eval').co_lines())
'''

if sys.hexversion >= 0x30b00b0:
    from types import GenericAlias
    _incedental_reverse_typemap["GenericAliasIteratorType"] = type(iter(GenericAlias(list, (int,))))
    '''
    _incedental_reverse_typemap['PositionsIteratorType'] = type(compile('3', '', 'eval').co_positions())
    '''

try:
    import winreg
    _incedental_reverse_typemap["HKEYType"] = winreg.HKEYType
except ImportError:
    # winreg only exists on Windows
    pass

_reverse_typemap.update(_incedental_reverse_typemap)
_incedental_types = set(_incedental_reverse_typemap.values())

del x

# forward map: type -> dill name
_typemap = dict((v, k) for k, v in _reverse_typemap.items())
def _unmarshal(string):
    """Load an object (typically a code object) from a marshal byte string."""
    return marshal.loads(string)
def _load_type(name):
    """Look up a type by its dill name in the reverse type map."""
    return _reverse_typemap[name]
def _create_type(typeobj, *args):
    """Instantiate typeobj with args (e.g. rebuild a class via type(name, bases, dict))."""
    return typeobj(*args)
603def _create_function(fcode, fglobals, fname=None, fdefaults=None,
604 fclosure=None, fdict=None, fkwdefaults=None):
605 # same as FunctionType, but enable passing __dict__ to new function,
606 # __dict__ is the storehouse for attributes added after function creation
607 func = FunctionType(fcode, fglobals or dict(), fname, fdefaults, fclosure)
608 if fdict is not None:
609 func.__dict__.update(fdict) #XXX: better copy? option to copy?
610 if fkwdefaults is not None:
611 func.__kwdefaults__ = fkwdefaults
612 # 'recurse' only stores referenced modules/objects in fglobals,
613 # thus we need to make sure that we have __builtins__ as well
614 if "__builtins__" not in func.__globals__:
615 func.__globals__["__builtins__"] = globals()["__builtins__"]
616 # assert id(fglobals) == id(func.__globals__)
617 return func
class match:
    """
    Make available a limited structural pattern matching-like syntax for Python < 3.10

    Patterns can be only tuples (without types) currently.
    Inspired by the package pattern-matching-PEP634.

    Usage:
    >>> with match(args) as m:
    >>>     if m.case(('x', 'y')):
    >>>         # use m.x and m.y
    >>>     elif m.case(('x', 'y', 'z')):
    >>>         # use m.x, m.y and m.z

    Equivalent native code for Python >= 3.10:
    >>> match args:
    >>>     case (x, y):
    >>>         # use x and y
    >>>     case (x, y, z):
    >>>         # use x, y and z
    """
    def __init__(self, value):
        self.value = value
        self._fields = None
    def __enter__(self):
        return self
    def __exit__(self, *exc_info):
        return False
    def case(self, args): # *args, **kwargs):
        """just handles tuple patterns"""
        # only a length match is required; names in args then bind positionally
        if len(self.value) != len(args): # + len(kwargs):
            return False
        #if not all(isinstance(arg, pat) for arg, pat in zip(self.value[len(args):], kwargs.values())):
        #    return False
        self.args = args # (*args, *kwargs)
        return True
    @property
    def fields(self):
        # Only bind names to values if necessary.
        if self._fields is None:
            self._fields = dict(zip(self.args, self.value))
        return self._fields
    def __getattr__(self, item):
        # attribute access resolves the matched names, e.g. m.x
        return self.fields[item]
# Table mapping a marker attribute (new in that Python version) to the ordered
# CodeType constructor parameters for that version; scanned newest-first.
ALL_CODE_PARAMS = [
    # Version New attribute CodeType parameters
    ((3,11,'a'), 'co_endlinetable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable endlinetable columntable exceptiontable freevars cellvars'),
    ((3,11), 'co_exceptiontable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable exceptiontable freevars cellvars'),
    ((3,10), 'co_linetable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno linetable freevars cellvars'),
    ((3,8), 'co_posonlyargcount', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno lnotab freevars cellvars'),
    ((3,7), 'co_kwonlyargcount', 'argcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno lnotab freevars cellvars'),
    ]
# detect the running interpreter's CodeType signature by probing for the
# newest marker attribute present
for version, new_attr, params in ALL_CODE_PARAMS:
    if hasattr(CodeType, new_attr):
        CODE_VERSION = version
        CODE_PARAMS = params.split()
        break
# parameters holding bytes-like data that may have been pickled as str
ENCODE_PARAMS = set(CODE_PARAMS).intersection(
    ['code', 'lnotab', 'linetable', 'endlinetable', 'columntable', 'exceptiontable'])
def _create_code(*args):
    """Reconstruct a CodeType from pickled members, translating between the
    CodeType signatures of different Python versions (3.7 through 3.12a).

    If the pickled layout matches the running interpreter, the code object is
    built directly; otherwise the members are bound to names via `match`, the
    missing fields are defaulted, and the constructor is called with the
    running interpreter's parameter order (CODE_PARAMS).
    """
    if not isinstance(args[0], int): # co_lnotab stored from >= 3.10
        LNOTAB, *args = args
    else: # from < 3.10 (or pre-LNOTAB storage)
        LNOTAB = b''

    with match(args) as m:
        # Python 3.11/3.12a (18 members)
        if m.case((
            'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname', 'firstlineno', # args[6:14]
            'linetable', 'exceptiontable', 'freevars', 'cellvars' # args[14:]
        )):
            if CODE_VERSION == (3,11):
                return CodeType(
                    *args[:6],
                    args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
                    *args[7:14],
                    args[14].encode() if hasattr(args[14], 'encode') else args[14], # linetable
                    args[15].encode() if hasattr(args[15], 'encode') else args[15], # exceptiontable
                    args[16],
                    args[17],
                )
            fields = m.fields
        # Python 3.10 or 3.8/3.9 (16 members)
        elif m.case((
            'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'firstlineno', # args[6:13]
            'LNOTAB_OR_LINETABLE', 'freevars', 'cellvars' # args[13:]
        )):
            if CODE_VERSION == (3,10) or CODE_VERSION == (3,8):
                return CodeType(
                    *args[:6],
                    args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
                    *args[7:13],
                    args[13].encode() if hasattr(args[13], 'encode') else args[13], # lnotab/linetable
                    args[14],
                    args[15],
                )
            fields = m.fields
            # the 14th member is lnotab on <=3.9 but linetable on >=3.10
            if CODE_VERSION >= (3,10):
                fields['linetable'] = m.LNOTAB_OR_LINETABLE
            else:
                fields['lnotab'] = LNOTAB if LNOTAB else m.LNOTAB_OR_LINETABLE
        # Python 3.7 (15 args)
        elif m.case((
            'argcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:5]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'firstlineno', # args[5:12]
            'lnotab', 'freevars', 'cellvars' # args[12:]
        )):
            if CODE_VERSION == (3,7):
                return CodeType(
                    *args[:5],
                    args[5].encode() if hasattr(args[5], 'encode') else args[5], # code
                    *args[6:12],
                    args[12].encode() if hasattr(args[12], 'encode') else args[12], # lnotab
                    args[13],
                    args[14],
                )
            fields = m.fields
        # Python 3.11a (20 members)
        elif m.case((
            'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname', 'firstlineno', # args[6:14]
            'linetable', 'endlinetable', 'columntable', 'exceptiontable', 'freevars', 'cellvars' # args[14:]
        )):
            if CODE_VERSION == (3,11,'a'):
                return CodeType(
                    *args[:6],
                    args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
                    *args[7:14],
                    *(a.encode() if hasattr(a, 'encode') else a for a in args[14:18]), # linetable-exceptiontable
                    args[18],
                    args[19],
                )
            fields = m.fields
        else:
            raise UnpicklingError("pattern match for code object failed")

    # The args format doesn't match this version.
    fields.setdefault('posonlyargcount', 0) # from python <= 3.7
    fields.setdefault('lnotab', LNOTAB) # from python >= 3.10
    fields.setdefault('linetable', b'') # from python <= 3.9
    fields.setdefault('qualname', fields['name']) # from python <= 3.10
    fields.setdefault('exceptiontable', b'') # from python <= 3.10
    fields.setdefault('endlinetable', None) # from python != 3.11a
    fields.setdefault('columntable', None) # from python != 3.11a

    args = (fields[k].encode() if k in ENCODE_PARAMS and hasattr(fields[k], 'encode') else fields[k]
            for k in CODE_PARAMS)
    return CodeType(*args)
772def _create_ftype(ftypeobj, func, args, kwds):
773 if kwds is None:
774 kwds = {}
775 if args is None:
776 args = ()
777 return ftypeobj(func, *args, **kwds)
def _create_typing_tuple(argz, *args): #NOTE: workaround python/cpython#94245
    """Reconstruct a typing.Tuple[...] form from its argument tuple.

    `((),)` encodes the empty-tuple type `Tuple[()]`; an empty `argz`
    rebuilds the degenerate form via copy_with (the cpython#94245 workaround).
    """
    if not argz:
        return typing.Tuple[()].copy_with(())
    if argz == ((),):
        return typing.Tuple[()]
    return typing.Tuple[argz]
if ThreadHandleType:
    def _create_thread_handle(ident, done, *args): #XXX: ignores 'blocking'
        """Reconstruct a thread handle for the given thread id, marking it
        done if the original thread had finished."""
        from threading import _make_thread_handle
        handle = _make_thread_handle(ident)
        if done:
            handle._set_done()
        return handle
794def _create_lock(locked, *args): #XXX: ignores 'blocking'
795 from threading import Lock
796 lock = Lock()
797 if locked:
798 if not lock.acquire(False):
799 raise UnpicklingError("Cannot acquire lock")
800 return lock
802def _create_rlock(count, owner, *args): #XXX: ignores 'blocking'
803 lock = RLockType()
804 if owner is not None:
805 lock._acquire_restore((count, owner))
806 if owner and not lock._is_owned():
807 raise UnpicklingError("Cannot acquire lock")
808 return lock
# thanks to matsjoyce for adding all the different file modes
def _create_filehandle(name, mode, position, closed, open, strictio, fmode, fdata): # buffering=0
    """Reconstruct a file handle from its pickled description.

    Depending on *fmode* (HANDLE_FMODE / CONTENTS_FMODE / FILE_FMODE) this
    reopens the named file, recreates it from the pickled contents *fdata*,
    or falls back to a temporary file; *position* and *closed* restore the
    cursor and open/closed state.
    """
    # only pickles the handle, not the file contents... good? or StringIO(data)?
    # (for file contents see: http://effbot.org/librarybook/copy-reg.htm)
    # NOTE: handle special cases first (are there more special cases?)
    names = {'<stdin>':sys.__stdin__, '<stdout>':sys.__stdout__,
             '<stderr>':sys.__stderr__} #XXX: better fileno=(0,1,2) ?
    if name in list(names.keys()):
        f = names[name] #XXX: safer "f=sys.stdin"
    elif name == '<tmpfile>':
        # os.tmpfile() was removed in Python 3; use tempfile instead
        import tempfile
        f = tempfile.TemporaryFile(mode)
    elif name == '<fdopen>':
        import tempfile
        f = tempfile.TemporaryFile(mode)
    else:
        try:
            exists = os.path.exists(name)
        except Exception:
            exists = False
        if not exists:
            if strictio:
                raise FileNotFoundError("[Errno 2] No such file or directory: '%s'" % name)
            elif "r" in mode and fmode != FILE_FMODE:
                name = '<fdopen>' # or os.devnull?
            current_size = 0 # or maintain position?
        else:
            current_size = os.path.getsize(name)

        if position > current_size:
            if strictio:
                raise ValueError("invalid buffer size")
            elif fmode == CONTENTS_FMODE:
                position = current_size
        # try to open the file by name
        # NOTE: has different fileno
        try:
            #FIXME: missing: *buffering*, encoding, softspace
            if fmode == FILE_FMODE:
                # restore the pickled contents, then reopen in the pickled mode
                f = open(name, mode if "w" in mode else "w")
                f.write(fdata)
                if "w" not in mode:
                    f.close()
                    f = open(name, mode)
            elif name == '<fdopen>': # file did not exist
                import tempfile
                f = tempfile.TemporaryFile(mode)
            # treat x mode as w mode
            elif fmode == CONTENTS_FMODE \
               and ("w" in mode or "x" in mode):
                # stop truncation when opening
                flags = os.O_CREAT
                if "+" in mode:
                    flags |= os.O_RDWR
                else:
                    flags |= os.O_WRONLY
                f = os.fdopen(os.open(name, flags), mode)
                # set name to the correct value
                r = getattr(f, "buffer", f)
                r = getattr(r, "raw", r)
                r.name = name
                assert f.name == name
            else:
                f = open(name, mode)
        except (IOError, FileNotFoundError):
            err = sys.exc_info()[1]
            raise UnpicklingError(err)
    if closed:
        f.close()
    elif position >= 0 and fmode != HANDLE_FMODE:
        f.seek(position)
    return f
882def _create_stringi(value, position, closed):
883 f = StringIO(value)
884 if closed: f.close()
885 else: f.seek(position)
886 return f
888def _create_stringo(value, position, closed):
889 f = StringIO()
890 if closed: f.close()
891 else:
892 f.write(value)
893 f.seek(position)
894 return f
896class _itemgetter_helper(object):
897 def __init__(self):
898 self.items = []
899 def __getitem__(self, item):
900 self.items.append(item)
901 return
class _attrgetter_helper(object):
    """Probe object that records the (dotted) attribute paths accessed on
    it, used to introspect operator.attrgetter instances by replaying them.
    """
    def __init__(self, attrs, index=None):
        # attrs: shared list of dotted paths recorded so far
        # index: slot in attrs that this probe extends, or None for a new path
        self.attrs = attrs
        self.index = index
    def __getattribute__(self, attr):
        # must bypass this very hook to read our own bookkeeping fields
        attrs = object.__getattribute__(self, "attrs")
        index = object.__getattribute__(self, "index")
        if index is None:
            # first segment of a new dotted path
            index = len(attrs)
            attrs.append(attr)
        else:
            # extend the path recorded at our slot: "a" -> "a.b"
            attrs[index] = ".".join([attrs[index], attr])
        return type(self)(attrs, index)
917class _dictproxy_helper(dict):
918 def __ror__(self, a):
919 return a
# shared probe instance for the mapping-proxy trick below
_dictproxy_helper_instance = _dictproxy_helper()

__d = {}
try:
    # In CPython 3.9 and later, this trick can be used to exploit the
    # implementation of the __or__ function of MappingProxyType to get the true
    # mapping referenced by the proxy. It may work for other implementations,
    # but is not guaranteed.
    MAPPING_PROXY_TRICK = __d is (DictProxyType(__d) | _dictproxy_helper_instance)
except Exception:
    # e.g. TypeError on interpreters without MappingProxyType.__or__
    MAPPING_PROXY_TRICK = False
del __d

# _CELL_REF and _CELL_EMPTY are used to stay compatible with versions of dill
# whose _create_cell functions do not have a default value.
# _CELL_REF can be safely removed entirely (replaced by empty tuples for calls
# to _create_cell) once breaking changes are allowed.
_CELL_REF = None
_CELL_EMPTY = Sentinel('_CELL_EMPTY')
def _create_cell(contents=None):
    """Create a closure cell holding *contents*.

    When *contents* is the _CELL_EMPTY sentinel, ``value`` is deliberately
    left unbound so the lambda's closure cell is created empty.
    """
    if contents is not _CELL_EMPTY:
        value = contents
    return (lambda: value).__closure__[0]
946def _create_weakref(obj, *args):
947 from weakref import ref
948 if obj is None: # it's dead
949 from collections import UserDict
950 return ref(UserDict(), *args)
951 return ref(obj, *args)
953def _create_weakproxy(obj, callable=False, *args):
954 from weakref import proxy
955 if obj is None: # it's dead
956 if callable: return proxy(lambda x:x, *args)
957 from collections import UserDict
958 return proxy(UserDict(), *args)
959 return proxy(obj, *args)
def _eval_repr(repr_str):
    """Reconstruct an object by evaluating its repr string.

    NOTE: uses eval; only reprs generated at pickling time (singletons such
    as Ellipsis/NotImplemented -- see save_singleton) should reach here.
    """
    return eval(repr_str)
964def _create_array(f, args, state, npdict=None):
965 #array = numpy.core.multiarray._reconstruct(*args)
966 array = f(*args)
967 array.__setstate__(state)
968 if npdict is not None: # we also have saved state in __dict__
969 array.__dict__.update(npdict)
970 return array
def _create_dtypemeta(scalar_type):
    """Recreate a numpy dtype metaclass/subclass for *scalar_type*."""
    # NumpyDType is True until the lazy numpy hook has run -- presumably
    # __hook__ performs the deferred numpy import (confirm at definition site)
    if NumpyDType is True: __hook__() # a bit hacky I think
    if scalar_type is None:
        return NumpyDType
    return type(NumpyDType(scalar_type))
def _create_namedtuple(name, fieldnames, modulename, defaults=None):
    """Reconstruct a namedtuple class: reuse the original class if it can
    still be imported, otherwise recreate an equivalent one."""
    class_ = _import_module(modulename + '.' + name, safe=True)
    if class_ is not None:
        return class_
    import collections
    t = collections.namedtuple(name, fieldnames, defaults=defaults, module=modulename)
    return t
def _create_capsule(pointer, name, context, destructor):
    """Restore a PyCapsule: prefer a capsule already importable at the
    dotted *name*; otherwise wrap the raw *pointer* in a new capsule."""
    attr_found = False
    try:
        # based on https://github.com/python/cpython/blob/f4095e53ab708d95e019c909d5928502775ba68f/Objects/capsule.c#L209-L231
        uname = name.decode('utf8')
        for i in range(1, uname.count('.')+1):
            names = uname.rsplit('.', i)
            try:
                module = __import__(names[0])
            except ImportError:
                pass
            # NOTE(review): if the first __import__ fails, 'module' is unbound
            # here and the NameError is swallowed by the outer except; also
            # the unconditional 'break' means only the first split is tried --
            # confirm against the upstream intent
            obj = module
            for attr in names[1:]:
                obj = getattr(obj, attr)
            capsule = obj
            attr_found = True
            break
    except Exception:
        pass

    if attr_found:
        if _PyCapsule_IsValid(capsule, name):
            return capsule
        raise UnpicklingError("%s object exists at %s but a PyCapsule object was expected." % (type(capsule), name))
    else:
        #warnings.warn('Creating a new PyCapsule %s for a C data structure that may not be present in memory. Segmentation faults or other memory errors are possible.' % (name,), UnpicklingWarning)
        capsule = _PyCapsule_New(pointer, name, destructor)
        _PyCapsule_SetContext(capsule, context)
        return capsule
def _getattr(objclass, name, repr_str):
    """Recover the descriptor *name* of *objclass*: first by eval'ing the
    class name parsed out of *repr_str*, then by class-dict lookup."""
    # hack to grab the reference directly
    try: #XXX: works only for __builtin__ ?
        # repr_str looks like "<... '<classname>' ...>"; grab the class name
        attr = repr_str.split("'")[3]
        return eval(attr+'.__dict__["'+name+'"]')
    except Exception:
        try:
            attr = objclass.__dict__
            if type(attr) is DictProxyType:
                attr = attr[name]
            else:
                attr = getattr(objclass,name)
        except (AttributeError, KeyError):
            attr = getattr(objclass,name)
    return attr
def _get_attr(self, name):
    """Look up *name* on *self*, falling back to the builtins module."""
    # stop recursive pickling
    # NOTE(review): `or` means any *falsy* attribute value (0, '', None)
    # also falls through to the __builtin__ lookup -- confirm intentional
    return getattr(self, name, None) or getattr(__builtin__, name)
1036def _import_module(import_name, safe=False):
1037 try:
1038 if import_name.startswith('__runtime__.'):
1039 return sys.modules[import_name]
1040 elif '.' in import_name:
1041 items = import_name.split('.')
1042 module = '.'.join(items[:-1])
1043 obj = items[-1]
1044 submodule = getattr(__import__(module, None, None, [obj]), obj)
1045 if isinstance(submodule, (ModuleType, type)):
1046 return submodule
1047 return __import__(import_name, None, None, [obj])
1048 else:
1049 return __import__(import_name)
1050 except (ImportError, AttributeError, KeyError):
1051 if safe:
1052 return None
1053 raise
1055# https://github.com/python/cpython/blob/a8912a0f8d9eba6d502c37d522221f9933e976db/Lib/pickle.py#L322-L333
1056def _getattribute(obj, name):
1057 for subpath in name.split('.'):
1058 if subpath == '<locals>':
1059 raise AttributeError("Can't get local attribute {!r} on {!r}"
1060 .format(name, obj))
1061 try:
1062 parent = obj
1063 obj = getattr(obj, subpath)
1064 except AttributeError:
1065 raise AttributeError("Can't get attribute {!r} on {!r}"
1066 .format(name, obj))
1067 return obj, parent
def _locate_function(obj, pickler=None):
    """Return True if *obj* can be found at its declared module location
    (i.e. it is safe to pickle it by reference instead of by value)."""
    module_name = getattr(obj, '__module__', None)
    # objects from __main__ (or the session's main module) must go by value
    if module_name in ['__main__', None] or \
            pickler and is_dill(pickler, child=False) and pickler._session and module_name == pickler._main.__name__:
        return False
    if hasattr(obj, '__qualname__'):
        module = _import_module(module_name, safe=True)
        try:
            found, _ = _getattribute(module, obj.__qualname__)
            return found is obj
        except AttributeError:
            return False
    else:
        found = _import_module(module_name + '.' + obj.__name__, safe=True)
        return found is obj
1086def _setitems(dest, source):
1087 for k, v in source.items():
1088 dest[k] = v
def _save_with_postproc(pickler, reduction, is_pickler_dill=None, obj=Getattr.NO_DEFAULT, postproc_list=None):
    """Save *obj* via *reduction*, then emit any post-processing reductions
    (e.g. deferred attribute/item assignment for cyclic objects) that were
    queued under id(obj) while it was being pickled."""
    if obj is Getattr.NO_DEFAULT:
        obj = Reduce(reduction) # pragma: no cover

    if is_pickler_dill is None:
        is_pickler_dill = is_dill(pickler, child=True)
    if is_pickler_dill:
        # assert id(obj) not in pickler._postproc, str(obj) + ' already pushed on stack!'
        # if not hasattr(pickler, 'x'): pickler.x = 0
        # print(pickler.x*' ', 'push', obj, id(obj), pickler._recurse)
        # pickler.x += 1
        if postproc_list is None:
            postproc_list = []

        # Recursive object not supported. Default to a global instead.
        if id(obj) in pickler._postproc:
            name = '%s.%s ' % (obj.__module__, getattr(obj, '__qualname__', obj.__name__)) if hasattr(obj, '__module__') else ''
            warnings.warn('Cannot pickle %r: %shas recursive self-references that trigger a RecursionError.' % (obj, name), PicklingWarning)
            pickler.save_global(obj)
            return
        pickler._postproc[id(obj)] = postproc_list

    # TODO: Use state_setter in Python 3.8 to allow for faster cPickle implementations
    pickler.save_reduce(*reduction, obj=obj)

    if is_pickler_dill:
        # pickler.x -= 1
        # print(pickler.x*' ', 'pop', obj, id(obj))
        postproc = pickler._postproc.pop(id(obj))
        # assert postproc_list == postproc, 'Stack tampered!'
        # replay queued post-processing steps, most recently queued first
        for reduction in reversed(postproc):
            if reduction[0] is _setitems:
                # use the internal machinery of pickle.py to speedup when
                # updating a dictionary in postproc
                dest, source = reduction[1]
                if source:
                    pickler.write(pickler.get(pickler.memo[id(dest)][0]))
                    pickler._batch_setitems(iter(source.items()))
                else:
                    # Updating with an empty dictionary. Same as doing nothing.
                    continue
            else:
                pickler.save_reduce(*reduction)
            # pop None created by calling preprocessing step off stack
            pickler.write(POP)
1137#@register(CodeType)
1138#def save_code(pickler, obj):
1139# logger.trace(pickler, "Co: %s", obj)
1140# pickler.save_reduce(_unmarshal, (marshal.dumps(obj),), obj=obj)
1141# logger.trace(pickler, "# Co")
1142# return
1144# The following function is based on 'save_codeobject' from 'cloudpickle'
1145# Copyright (c) 2012, Regents of the University of California.
1146# Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
1147# License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE
@register(CodeType)
def save_code(pickler, obj):
    """Pickle a code object, selecting the co_* argument layout that
    matches the running interpreter's CodeType signature."""
    logger.trace(pickler, "Co: %s", obj)
    if hasattr(obj, "co_endlinetable"): # python 3.11a (20 args)
        args = (
            obj.co_lnotab, # for < python 3.10 [not counted in args]
            obj.co_argcount, obj.co_posonlyargcount,
            obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
            obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
            obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname,
            obj.co_firstlineno, obj.co_linetable, obj.co_endlinetable,
            obj.co_columntable, obj.co_exceptiontable, obj.co_freevars,
            obj.co_cellvars
        )
    elif hasattr(obj, "co_exceptiontable"): # python 3.11 (18 args)
        with warnings.catch_warnings():
            # silence the DeprecationWarning raised while building args (issue 597)
            if not OLD312a7: # issue 597
                warnings.filterwarnings('ignore', category=DeprecationWarning)
            args = (
                obj.co_lnotab, # for < python 3.10 [not counted in args]
                obj.co_argcount, obj.co_posonlyargcount,
                obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
                obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
                obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname,
                obj.co_firstlineno, obj.co_linetable, obj.co_exceptiontable,
                obj.co_freevars, obj.co_cellvars
            )
    elif hasattr(obj, "co_linetable"): # python 3.10 (16 args)
        args = (
            obj.co_lnotab, # for < python 3.10 [not counted in args]
            obj.co_argcount, obj.co_posonlyargcount,
            obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
            obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
            obj.co_varnames, obj.co_filename, obj.co_name,
            obj.co_firstlineno, obj.co_linetable, obj.co_freevars,
            obj.co_cellvars
        )
    elif hasattr(obj, "co_posonlyargcount"): # python 3.8 (16 args)
        args = (
            obj.co_argcount, obj.co_posonlyargcount,
            obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
            obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
            obj.co_varnames, obj.co_filename, obj.co_name,
            obj.co_firstlineno, obj.co_lnotab, obj.co_freevars,
            obj.co_cellvars
        )
    else: # python 3.7 (15 args)
        args = (
            obj.co_argcount, obj.co_kwonlyargcount, obj.co_nlocals,
            obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts,
            obj.co_names, obj.co_varnames, obj.co_filename,
            obj.co_name, obj.co_firstlineno, obj.co_lnotab,
            obj.co_freevars, obj.co_cellvars
        )

    pickler.save_reduce(_create_code, args, obj=obj)
    logger.trace(pickler, "# Co")
    return
1207def _repr_dict(obj):
1208 """Make a short string representation of a dictionary."""
1209 return "<%s object at %#012x>" % (type(obj).__name__, id(obj))
@register(dict)
def save_module_dict(pickler, obj):
    """Pickle a dict, special-casing module __dict__s so they unpickle as
    references to the (re)imported module instead of by value."""
    if is_dill(pickler, child=False) and obj == pickler._main.__dict__ and \
            not (pickler._session and pickler._first_pass):
        logger.trace(pickler, "D1: %s", _repr_dict(obj)) # obj
        # emit a raw global opcode referencing the unpickler's __main__
        pickler.write(bytes('c__builtin__\n__main__\n', 'UTF-8'))
        logger.trace(pickler, "# D1")
    elif (not is_dill(pickler, child=False)) and (obj == _main_module.__dict__):
        logger.trace(pickler, "D3: %s", _repr_dict(obj)) # obj
        pickler.write(bytes('c__main__\n__dict__\n', 'UTF-8')) #XXX: works in general?
        logger.trace(pickler, "# D3")
    elif '__name__' in obj and obj != _main_module.__dict__ \
            and type(obj['__name__']) is str \
            and obj is getattr(_import_module(obj['__name__'],True), '__dict__', None):
        # an importable module's __dict__: emit a global reference to it
        logger.trace(pickler, "D4: %s", _repr_dict(obj)) # obj
        pickler.write(bytes('c%s\n__dict__\n' % obj['__name__'], 'UTF-8'))
        logger.trace(pickler, "# D4")
    else:
        # an ordinary dict: defer to the stock pickler
        logger.trace(pickler, "D2: %s", _repr_dict(obj)) # obj
        if is_dill(pickler, child=False) and pickler._session:
            # we only care about session the first pass thru
            pickler._first_pass = False
        StockPickler.save_dict(pickler, obj)
        logger.trace(pickler, "# D2")
    return
if not OLD310 and MAPPING_PROXY_TRICK:
    # Python >= 3.10 with the proxy trick available: recover the true dict
    # behind obj.mapping and pickle each view as view-func(mapping).
    def save_dict_view(dicttype):
        def save_dict_view_for_function(func):
            def _save_dict_view(pickler, obj):
                logger.trace(pickler, "Dkvi: <%s>", obj)
                mapping = obj.mapping | _dictproxy_helper_instance
                pickler.save_reduce(func, (mapping,), obj=obj)
                logger.trace(pickler, "# Dkvi")
            return _save_dict_view
        return [
            (funcname, save_dict_view_for_function(getattr(dicttype, funcname)))
            for funcname in ('keys', 'values', 'items')
        ]
else:
    # The following functions are based on 'cloudpickle'
    # https://github.com/cloudpipe/cloudpickle/blob/5d89947288a18029672596a4d719093cc6d5a412/cloudpickle/cloudpickle.py#L922-L940
    # Copyright (c) 2012, Regents of the University of California.
    # Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
    # License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE
    def save_dict_view(dicttype):
        # reconstruct a surrogate dict for each view at unpickle time
        def save_dict_keys(pickler, obj):
            logger.trace(pickler, "Dk: <%s>", obj)
            dict_constructor = _shims.Reduce(dicttype.fromkeys, (list(obj),))
            pickler.save_reduce(dicttype.keys, (dict_constructor,), obj=obj)
            logger.trace(pickler, "# Dk")

        def save_dict_values(pickler, obj):
            logger.trace(pickler, "Dv: <%s>", obj)
            dict_constructor = _shims.Reduce(dicttype, (enumerate(obj),))
            pickler.save_reduce(dicttype.values, (dict_constructor,), obj=obj)
            logger.trace(pickler, "# Dv")

        def save_dict_items(pickler, obj):
            logger.trace(pickler, "Di: <%s>", obj)
            pickler.save_reduce(dicttype.items, (dicttype(obj),), obj=obj)
            logger.trace(pickler, "# Di")

        return (
            ('keys', save_dict_keys),
            ('values', save_dict_values),
            ('items', save_dict_items)
        )
# Register the view-saving functions for each dict view type that the
# pickler does not already have a dispatch entry for.
for __dicttype in (
    dict,
    OrderedDict
):
    __obj = __dicttype()
    for __funcname, __savefunc in save_dict_view(__dicttype):
        __tview = type(getattr(__obj, __funcname)())
        if __tview not in Pickler.dispatch:
            Pickler.dispatch[__tview] = __savefunc
del __dicttype, __obj, __funcname, __tview, __savefunc
@register(ClassType)
def save_classobj(pickler, obj): #FIXME: enable pickler._byref
    """Pickle an old-style class: by value when it cannot be located in
    its module, otherwise as a global reference."""
    if not _locate_function(obj, pickler):
        logger.trace(pickler, "C1: %s", obj)
        pickler.save_reduce(ClassType, (obj.__name__, obj.__bases__,
                                        obj.__dict__), obj=obj)
        #XXX: or obj.__dict__.copy()), obj=obj) ?
        logger.trace(pickler, "# C1")
    else:
        logger.trace(pickler, "C2: %s", obj)
        name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
        StockPickler.save_global(pickler, obj, name=name)
        logger.trace(pickler, "# C2")
    return
@register(typing._GenericAlias)
def save_generic_alias(pickler, obj):
    """Pickle typing generic aliases (e.g. List[int]); the empty Tuple
    forms need a dedicated constructor."""
    args = obj.__args__
    if type(obj.__reduce__()) is str:
        # __reduce__ returned a global name: save by reference
        logger.trace(pickler, "Ga0: %s", obj)
        StockPickler.save_global(pickler, obj, name=obj.__reduce__())
        logger.trace(pickler, "# Ga0")
    elif obj.__origin__ is tuple and (not args or args == ((),)):
        # bare Tuple / Tuple[()]: rebuild via _create_typing_tuple
        logger.trace(pickler, "Ga1: %s", obj)
        pickler.save_reduce(_create_typing_tuple, (args,), obj=obj)
        logger.trace(pickler, "# Ga1")
    else:
        logger.trace(pickler, "Ga2: %s", obj)
        StockPickler.save_reduce(pickler, *obj.__reduce__(), obj=obj)
        logger.trace(pickler, "# Ga2")
    return
if ThreadHandleType:
    @register(ThreadHandleType)
    def save_thread_handle(pickler, obj):
        # recreate the handle from the thread ident and its done flag
        logger.trace(pickler, "Th: %s", obj)
        pickler.save_reduce(_create_thread_handle, (obj.ident, obj.is_done()), obj=obj)
        logger.trace(pickler, "# Th")
        return
@register(LockType) #XXX: copied Thread will have new Event (due to new Lock)
def save_lock(pickler, obj):
    # recreate the lock in the same locked/unlocked state
    logger.trace(pickler, "Lo: %s", obj)
    pickler.save_reduce(_create_lock, (obj.locked(),), obj=obj)
    logger.trace(pickler, "# Lo")
    return
@register(RLockType)
def save_rlock(pickler, obj):
    # parse count/owner out of the repr, e.g.
    # "<locked _thread.RLock object owner=123 count=1 at 0x...>"
    logger.trace(pickler, "RL: %s", obj)
    r = obj.__repr__() # don't use _release_save as it unlocks the lock
    count = int(r.split('count=')[1].split()[0].rstrip('>'))
    owner = int(r.split('owner=')[1].split()[0])
    pickler.save_reduce(_create_rlock, (count,owner,), obj=obj)
    logger.trace(pickler, "# RL")
    return
#@register(SocketType) #FIXME: causes multiprocess test_pickling FAIL
def save_socket(pickler, obj):
    # currently unregistered (see FIXME above); kept for reference
    logger.trace(pickler, "So: %s", obj)
    pickler.save_reduce(*reduce_socket(obj))
    logger.trace(pickler, "# So")
    return
def _save_file(pickler, obj, open_):
    """Common file-handle pickling: record name/mode/position (plus full
    contents under FILE_FMODE) for _create_filehandle to restore."""
    if obj.closed:
        position = 0
    else:
        obj.flush()
        if obj in (sys.__stdout__, sys.__stderr__, sys.__stdin__):
            # standard streams: flag that no cursor should be restored
            position = -1
        else:
            position = obj.tell()
    if is_dill(pickler, child=True) and pickler._fmode == FILE_FMODE:
        # FILE_FMODE: embed the entire file contents in the pickle
        f = open_(obj.name, "r")
        fdata = f.read()
        f.close()
    else:
        fdata = ""
    if is_dill(pickler, child=True):
        strictio = pickler._strictio
        fmode = pickler._fmode
    else:
        strictio = False
        fmode = 0 # HANDLE_FMODE
    pickler.save_reduce(_create_filehandle, (obj.name, obj.mode, position,
                                             obj.closed, open_, strictio,
                                             fmode, fdata), obj=obj)
    return
@register(FileType) #XXX: in 3.x has buffer=0, needs different _create?
@register(BufferedReaderType)
@register(BufferedWriterType)
@register(TextWrapperType)
def save_file(pickler, obj):
    # C-implemented io types reopen with the builtin open
    logger.trace(pickler, "Fi: %s", obj)
    f = _save_file(pickler, obj, open)
    logger.trace(pickler, "# Fi")
    return f
if BufferedRandomType:
    @register(BufferedRandomType)
    def save_file(pickler, obj):
        # BufferedRandom handled like the other C io types
        logger.trace(pickler, "Fi: %s", obj)
        f = _save_file(pickler, obj, open)
        logger.trace(pickler, "# Fi")
        return f
if PyTextWrapperType:
    @register(PyBufferedReaderType)
    @register(PyBufferedWriterType)
    @register(PyTextWrapperType)
    def save_file(pickler, obj):
        # pure-python io types reopen with the pure-python open (_open)
        logger.trace(pickler, "Fi: %s", obj)
        f = _save_file(pickler, obj, _open)
        logger.trace(pickler, "# Fi")
        return f

    if PyBufferedRandomType:
        @register(PyBufferedRandomType)
        def save_file(pickler, obj):
            logger.trace(pickler, "Fi: %s", obj)
            f = _save_file(pickler, obj, _open)
            logger.trace(pickler, "# Fi")
            return f
1421# The following two functions are based on 'saveCStringIoInput'
1422# and 'saveCStringIoOutput' from spickle
1423# Copyright (c) 2011 by science+computing ag
1424# License: http://www.apache.org/licenses/LICENSE-2.0
if InputType:
    @register(InputType)
    def save_stringi(pickler, obj):
        # cStringIO input: capture value and cursor unless closed
        logger.trace(pickler, "Io: %s", obj)
        if obj.closed:
            value = ''; position = 0
        else:
            value = obj.getvalue(); position = obj.tell()
        pickler.save_reduce(_create_stringi, (value, position, \
                            obj.closed), obj=obj)
        logger.trace(pickler, "# Io")
        return

    @register(OutputType)
    def save_stringo(pickler, obj):
        # cStringIO output: capture value and cursor unless closed
        logger.trace(pickler, "Io: %s", obj)
        if obj.closed:
            value = ''; position = 0
        else:
            value = obj.getvalue(); position = obj.tell()
        pickler.save_reduce(_create_stringo, (value, position, \
                            obj.closed), obj=obj)
        logger.trace(pickler, "# Io")
        return
if LRUCacheType is not None:
    from functools import lru_cache
    @register(LRUCacheType)
    def save_lru_cache(pickler, obj):
        """Pickle an lru_cache wrapper by re-wrapping its target function."""
        logger.trace(pickler, "LRU: %s", obj)
        if OLD39:
            # pre-3.9: only maxsize is recoverable, via cache_info()
            kwargs = obj.cache_info()
            args = (kwargs.maxsize,)
        else:
            kwargs = obj.cache_parameters()
            args = (kwargs['maxsize'], kwargs['typed'])
        if args != lru_cache.__defaults__:
            wrapper = Reduce(lru_cache, args, is_callable=True)
        else:
            # default parameters: reference lru_cache itself
            wrapper = lru_cache
        pickler.save_reduce(wrapper, (obj.__wrapped__,), obj=obj)
        logger.trace(pickler, "# LRU")
        return
@register(SuperType)
def save_super(pickler, obj):
    # rebuild via super(__thisclass__, __self__)
    logger.trace(pickler, "Su: %s", obj)
    pickler.save_reduce(super, (obj.__thisclass__, obj.__self__), obj=obj)
    logger.trace(pickler, "# Su")
    return
if IS_PYPY:
    @register(MethodType)
    def save_instancemethod0(pickler, obj):
        """Pickle a bound method; PyPy builtins need a getattr fallback."""
        code = getattr(obj.__func__, '__code__', None)
        if code is not None and type(code) is not CodeType \
                and getattr(obj.__self__, obj.__name__) == obj:
            # Some PyPy builtin functions have no module name
            logger.trace(pickler, "Me2: %s", obj)
            # TODO: verify that this works for all PyPy builtin methods
            pickler.save_reduce(getattr, (obj.__self__, obj.__name__), obj=obj)
            logger.trace(pickler, "# Me2")
            return

        logger.trace(pickler, "Me1: %s", obj)
        pickler.save_reduce(MethodType, (obj.__func__, obj.__self__), obj=obj)
        logger.trace(pickler, "# Me1")
        return
else:
    @register(MethodType)
    def save_instancemethod0(pickler, obj):
        # rebind __func__ to __self__ at unpickle time
        logger.trace(pickler, "Me1: %s", obj)
        pickler.save_reduce(MethodType, (obj.__func__, obj.__self__), obj=obj)
        logger.trace(pickler, "# Me1")
        return
if not IS_PYPY:
    @register(MemberDescriptorType)
    @register(GetSetDescriptorType)
    @register(MethodDescriptorType)
    @register(WrapperDescriptorType)
    @register(ClassMethodDescriptorType)
    def save_wrapper_descriptor(pickler, obj):
        # recover the descriptor from its owning class at unpickle time
        logger.trace(pickler, "Wr: %s", obj)
        pickler.save_reduce(_getattr, (obj.__objclass__, obj.__name__,
                                       obj.__repr__()), obj=obj)
        logger.trace(pickler, "# Wr")
        return
else:
    # PyPy exposes fewer distinct descriptor types
    @register(MemberDescriptorType)
    @register(GetSetDescriptorType)
    def save_wrapper_descriptor(pickler, obj):
        logger.trace(pickler, "Wr: %s", obj)
        pickler.save_reduce(_getattr, (obj.__objclass__, obj.__name__,
                                       obj.__repr__()), obj=obj)
        logger.trace(pickler, "# Wr")
        return
@register(CellType)
def save_cell(pickler, obj):
    """Pickle a closure cell, handling empty cells and reference cycles
    through the pickler's postproc machinery."""
    try:
        f = obj.cell_contents
    except ValueError: # cell is empty
        logger.trace(pickler, "Ce3: %s", obj)
        # _shims._CELL_EMPTY is defined in _shims.py to support PyPy 2.7.
        # It unpickles to a sentinel object _dill._CELL_EMPTY, also created in
        # _shims.py. This object is not present in Python 3 because the cell's
        # contents can be deleted in newer versions of Python. The reduce object
        # will instead unpickle to None if unpickled in Python 3.

        # When breaking changes are made to dill, (_shims._CELL_EMPTY,) can
        # be replaced by () OR the delattr function can be removed depending on
        # whichever is more convenient.
        pickler.save_reduce(_create_cell, (_shims._CELL_EMPTY,), obj=obj)
        # Call the function _delattr on the cell's cell_contents attribute
        # The result of this function call will be None
        pickler.save_reduce(_shims._delattr, (obj, 'cell_contents'))
        # pop None created by calling _delattr off stack
        pickler.write(POP)
        logger.trace(pickler, "# Ce3")
        return
    if is_dill(pickler, child=True):
        if id(f) in pickler._postproc:
            # Already seen. Add to its postprocessing.
            postproc = pickler._postproc[id(f)]
        else:
            # Haven't seen it. Add to the highest possible object and set its
            # value as late as possible to prevent cycle.
            postproc = next(iter(pickler._postproc.values()), None)
        if postproc is not None:
            logger.trace(pickler, "Ce2: %s", obj)
            # _CELL_REF is defined in _shims.py to support older versions of
            # dill. When breaking changes are made to dill, (_CELL_REF,) can
            # be replaced by ()
            pickler.save_reduce(_create_cell, (_CELL_REF,), obj=obj)
            postproc.append((_shims._setattr, (obj, 'cell_contents', f)))
            logger.trace(pickler, "# Ce2")
            return
    logger.trace(pickler, "Ce1: %s", obj)
    pickler.save_reduce(_create_cell, (f,), obj=obj)
    logger.trace(pickler, "# Ce1")
    return
if MAPPING_PROXY_TRICK:
    @register(DictProxyType)
    def save_dictproxy(pickler, obj):
        # recover the proxied dict itself via the __ror__ trick
        logger.trace(pickler, "Mp: %s", _repr_dict(obj)) # obj
        mapping = obj | _dictproxy_helper_instance
        pickler.save_reduce(DictProxyType, (mapping,), obj=obj)
        logger.trace(pickler, "# Mp")
        return
else:
    @register(DictProxyType)
    def save_dictproxy(pickler, obj):
        # fallback: wrap a shallow copy of the proxied mapping
        logger.trace(pickler, "Mp: %s", _repr_dict(obj)) # obj
        pickler.save_reduce(DictProxyType, (obj.copy(),), obj=obj)
        logger.trace(pickler, "# Mp")
        return
@register(SliceType)
def save_slice(pickler, obj):
    # rebuild from (start, stop, step)
    logger.trace(pickler, "Sl: %s", obj)
    pickler.save_reduce(slice, (obj.start, obj.stop, obj.step), obj=obj)
    logger.trace(pickler, "# Sl")
    return
@register(XRangeType)
@register(EllipsisType)
@register(NotImplementedType)
def save_singleton(pickler, obj):
    # these types round-trip through their repr (see _eval_repr)
    logger.trace(pickler, "Si: %s", obj)
    pickler.save_reduce(_eval_repr, (obj.__repr__(),), obj=obj)
    logger.trace(pickler, "# Si")
    return
def _proxy_helper(obj): # a dead proxy returns a reference to None
    """get memory address of proxy's reference object"""
    _repr = repr(obj)
    try: _str = str(obj)
    except ReferenceError: # it's a dead proxy
        return id(None)
    if _str == _repr: return id(obj) # it's a repr
    try: # either way, it's a proxy from here
        # parse the "... at 0x..." address out of the referent's str()
        address = int(_str.rstrip('>').split(' at ')[-1], base=16)
    except ValueError: # special case: proxy of a 'type'
        if not IS_PYPY:
            address = int(_repr.rstrip('>').split(' at ')[-1], base=16)
        else:
            # PyPy reprs carry no address: scan gc for a matching repr
            objects = iter(gc.get_objects())
            for _obj in objects:
                if repr(_obj) == _str: return id(_obj)
            # all bad below... nothing found so throw ReferenceError
            msg = "Cannot reference object for proxy at '%s'" % id(obj)
            raise ReferenceError(msg)
    return address
1621def _locate_object(address, module=None):
1622 """get object located at the given memory address (inverse of id(obj))"""
1623 special = [None, True, False] #XXX: more...?
1624 for obj in special:
1625 if address == id(obj): return obj
1626 if module:
1627 objects = iter(module.__dict__.values())
1628 else: objects = iter(gc.get_objects())
1629 for obj in objects:
1630 if address == id(obj): return obj
1631 # all bad below... nothing found so throw ReferenceError or TypeError
1632 try: address = hex(address)
1633 except TypeError:
1634 raise TypeError("'%s' is not a valid memory address" % str(address))
1635 raise ReferenceError("Cannot reference object at '%s'" % address)
@register(ReferenceType)
def save_weakref(pickler, obj):
    """Pickle a weakref by pickling its referent (None if dead)."""
    refobj = obj()
    logger.trace(pickler, "R1: %s", obj)
    #refobj = ctypes.pythonapi.PyWeakref_GetObject(obj) # dead returns "None"
    pickler.save_reduce(_create_weakref, (refobj,), obj=obj)
    logger.trace(pickler, "# R1")
    return
@register(ProxyType)
@register(CallableProxyType)
def save_weakproxy(pickler, obj):
    """Pickle a weak proxy by locating and pickling its referent."""
    # Must do string substitution here and use %r to avoid ReferenceError.
    logger.trace(pickler, "R2: %r" % obj)
    refobj = _locate_object(_proxy_helper(obj))
    pickler.save_reduce(_create_weakproxy, (refobj, callable(obj)), obj=obj)
    logger.trace(pickler, "# R2")
    return
def _is_builtin_module(module):
    """Heuristically decide whether *module* should be pickled by reference
    (stdlib/extension/site-packages modules) rather than by value."""
    if not hasattr(module, "__file__"):
        return True
    modfile = module.__file__
    if modfile is None:
        return False
    # If a module file name starts with prefix, it should be a builtin
    # module, so should always be pickled as a reference.
    # See https://github.com/uqfoundation/dill/issues/566
    realpath = os.path.realpath
    for attr in ("base_prefix", "base_exec_prefix", "exec_prefix", "prefix", "real_prefix"):
        if not hasattr(sys, attr):
            continue
        prefix = getattr(sys, attr)
        if modfile.startswith(prefix) or realpath(modfile).startswith(realpath(prefix)):
            return True
    return (modfile.endswith(EXTENSION_SUFFIXES)
            or 'site-packages' in modfile)
1675def _is_imported_module(module):
1676 return getattr(module, '__loader__', None) is not None or module in sys.modules.values()
@register(ModuleType)
def save_module(pickler, obj):
    """Pickle a module: by value (its filtered __dict__) for the session
    main module and non-builtin modules, otherwise by reference."""
    if False: #_use_diff:
        # dead branch kept for the experimental 'diff' feature
        if obj.__name__.split('.', 1)[0] != "dill":
            try:
                changed = diff.whats_changed(obj, seen=pickler._diff_cache)[0]
            except RuntimeError: # not memorised module, probably part of dill
                pass
            else:
                logger.trace(pickler, "M2: %s with diff", obj)
                logger.info("Diff: %s", changed.keys())
                pickler.save_reduce(_import_module, (obj.__name__,), obj=obj,
                                    state=changed)
                logger.trace(pickler, "# M2")
                return

        logger.trace(pickler, "M1: %s", obj)
        pickler.save_reduce(_import_module, (obj.__name__,), obj=obj)
        logger.trace(pickler, "# M1")
    else:
        builtin_mod = _is_builtin_module(obj)
        is_session_main = is_dill(pickler, child=True) and obj is pickler._main
        if (obj.__name__ not in ("builtins", "dill", "dill._dill") and not builtin_mod
                or is_session_main):
            logger.trace(pickler, "M1: %s", obj)
            # Hack for handling module-type objects in load_module().
            mod_name = obj.__name__ if _is_imported_module(obj) else '__runtime__.%s' % obj.__name__
            # Second references are saved as __builtin__.__main__ in save_module_dict().
            main_dict = obj.__dict__.copy()
            # drop entries that cannot or should not be pickled by value
            for item in ('__builtins__', '__loader__'):
                main_dict.pop(item, None)
            for item in IPYTHON_SINGLETONS: #pragma: no cover
                if getattr(main_dict.get(item), '__module__', '').startswith('IPython'):
                    del main_dict[item]
            pickler.save_reduce(_import_module, (mod_name,), obj=obj, state=main_dict)
            logger.trace(pickler, "# M1")
        elif obj.__name__ == "dill._dill":
            logger.trace(pickler, "M2: %s", obj)
            pickler.save_global(obj, name="_dill")
            logger.trace(pickler, "# M2")
        else:
            logger.trace(pickler, "M2: %s", obj)
            pickler.save_reduce(_import_module, (obj.__name__,), obj=obj)
            logger.trace(pickler, "# M2")
    return
1724# The following function is based on '_extract_class_dict' from 'cloudpickle'
1725# Copyright (c) 2012, Regents of the University of California.
1726# Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
1727# License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE
# The following function is based on '_extract_class_dict' from 'cloudpickle'
def _get_typedict_type(cls, clsdict, attrs, postproc_list):
    """Retrieve a copy of the dict of a class without the inherited methods"""
    # Gather everything visible through the bases; with multiple bases the
    # later-listed base wins, so iterate them in reverse while merging.
    bases = cls.__bases__
    if len(bases) == 1:
        inherited_dict = bases[0].__dict__
    else:
        inherited_dict = {}
        for base in reversed(bases):
            inherited_dict.update(base.__dict__)

    # An entry is inherited (not redefined) when it is the *same object* as
    # the base's attribute; only named callables/classes (things carrying a
    # __qualname__) are dropped.  Use dict.items/dict.pop to bypass any
    # overridden mapping methods on clsdict.
    doomed = [
        name for name, value in dict.items(clsdict)
        if name in inherited_dict
        and inherited_dict[name] is value
        and hasattr(value, '__qualname__')
    ]
    for name in doomed:
        dict.pop(clsdict, name)

    if issubclass(type(cls), type):
        # These descriptors are recreated automatically by the type machinery.
        clsdict.pop('__dict__', None)
        clsdict.pop('__weakref__', None)
        # clsdict.pop('__prepare__', None)
    return clsdict, attrs
def _get_typedict_abc(obj, _dict, attrs, postproc_list):
    """Strip ABC bookkeeping from a class dict and schedule re-registration.

    Removes the non-picklable ABC implementation caches from *_dict* and
    appends ``obj.register(<subclass>)`` calls to *postproc_list* so every
    virtual subclass is registered again after the class is recreated.
    Returns the cleaned ``(_dict, attrs)`` pair.  Raises PicklingError if no
    registry can be located on *obj*.
    """
    if hasattr(abc, '_get_dump'):
        # Modern CPython: registry entries are weak references — dereference.
        (registry, _, _, _) = abc._get_dump(obj)
        register = obj.register
        postproc_list.extend((register, (reg(),)) for reg in registry)
    elif hasattr(obj, '_abc_registry'):
        # Older implementations expose the registry set directly.
        registry = obj._abc_registry
        register = obj.register
        postproc_list.extend((register, (reg,)) for reg in registry)
    else:
        # BUGFIX: the message was passed as a separate argument and never
        # interpolated; format it explicitly.
        raise PicklingError("Cannot find registry of ABC %s" % (obj,))

    if '_abc_registry' in _dict:
        _dict.pop('_abc_registry', None)
        _dict.pop('_abc_cache', None)
        _dict.pop('_abc_negative_cache', None)
        # _dict.pop('_abc_negative_cache_version', None)
    else:
        # CPython >= 3.7 keeps all caches behind a single _abc_impl slot.
        _dict.pop('_abc_impl', None)
    return _dict, attrs
@register(TypeType)
def save_type(pickler, obj, postproc_list=None):
    """Pickle a type (class) object.

    Dispatch by kind of type (trace tags in parentheses):
      T1 - types present in _typemap (known builtin/stdlib types): saved by name.
      T6 - namedtuple classes: rebuilt via _create_namedtuple.
      T7 - singleton types (NoneType, NotImplementedType, EllipsisType,
           EnumMeta, ExceptHookArgs): saved by global reference.
      T2 - classes that cannot be located by name: serialized by value
           from their __dict__ (unless pickler._byref or self-recursive).
      T4 - everything else: saved by reference via StockPickler.save_global.
    """
    if obj in _typemap:
        logger.trace(pickler, "T1: %s", obj)
        # if obj in _incedental_types:
        #     warnings.warn('Type %r may only exist on this implementation of Python and cannot be unpickled in other implementations.' % (obj,), PicklingWarning)
        pickler.save_reduce(_load_type, (_typemap[obj],), obj=obj)
        logger.trace(pickler, "# T1")
    elif obj.__bases__ == (tuple,) and all([hasattr(obj, attr) for attr in ('_fields','_asdict','_make','_replace')]):
        # special case: namedtuples
        logger.trace(pickler, "T6: %s", obj)

        obj_name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
        if obj.__name__ != obj_name:
            # restore the original qualname after recreation
            if postproc_list is None:
                postproc_list = []
            postproc_list.append((setattr, (obj, '__qualname__', obj_name)))

        if not obj._field_defaults:
            _save_with_postproc(pickler, (_create_namedtuple, (obj.__name__, obj._fields, obj.__module__)), obj=obj, postproc_list=postproc_list)
        else:
            # keep defaults in field order
            defaults = [obj._field_defaults[field] for field in obj._fields if field in obj._field_defaults]
            _save_with_postproc(pickler, (_create_namedtuple, (obj.__name__, obj._fields, obj.__module__, defaults)), obj=obj, postproc_list=postproc_list)
        logger.trace(pickler, "# T6")
        return

    # special cases: NoneType, NotImplementedType, EllipsisType, EnumMeta, etc
    elif obj is type(None):
        logger.trace(pickler, "T7: %s", obj)
        #XXX: pickler.save_reduce(type, (None,), obj=obj)
        pickler.write(GLOBAL + b'__builtin__\nNoneType\n')
        logger.trace(pickler, "# T7")
    elif obj is NotImplementedType:
        logger.trace(pickler, "T7: %s", obj)
        pickler.save_reduce(type, (NotImplemented,), obj=obj)
        logger.trace(pickler, "# T7")
    elif obj is EllipsisType:
        logger.trace(pickler, "T7: %s", obj)
        pickler.save_reduce(type, (Ellipsis,), obj=obj)
        logger.trace(pickler, "# T7")
    elif obj is EnumMeta:
        logger.trace(pickler, "T7: %s", obj)
        pickler.write(GLOBAL + b'enum\nEnumMeta\n')
        logger.trace(pickler, "# T7")
    elif obj is ExceptHookArgsType: #NOTE: must be after NoneType for pypy
        logger.trace(pickler, "T7: %s", obj)
        pickler.write(GLOBAL + b'threading\nExceptHookArgs\n')
        logger.trace(pickler, "# T7")

    else:
        _byref = getattr(pickler, '_byref', None)
        obj_recursive = id(obj) in getattr(pickler, '_postproc', ())
        incorrectly_named = not _locate_function(obj, pickler)
        if not _byref and not obj_recursive and incorrectly_named: # not a function, but the name was held over
            if postproc_list is None:
                postproc_list = []

            # thanks to Tom Stepleton pointing out pickler._session unneeded
            logger.trace(pickler, "T2: %s", obj)
            _dict, attrs = _get_typedict_type(obj, obj.__dict__.copy(), None, postproc_list) # copy dict proxy to a dict

            #print (_dict)
            #print ("%s\n%s" % (type(obj), obj.__name__))
            #print ("%s\n%s" % (obj.__bases__, obj.__dict__))
            # __slots__ members are descriptors recreated by type(); they must
            # not appear in the class dict passed to the reconstructor.
            slots = _dict.get('__slots__', ())
            if type(slots) == str:
                # __slots__ accepts a single string
                slots = (slots,)

            for name in slots:
                _dict.pop(name, None)

            if isinstance(obj, abc.ABCMeta):
                logger.trace(pickler, "ABC: %s", obj)
                _dict, attrs = _get_typedict_abc(obj, _dict, attrs, postproc_list)
                logger.trace(pickler, "# ABC")

            qualname = getattr(obj, '__qualname__', None)
            if attrs is not None:
                for k, v in attrs.items():
                    postproc_list.append((setattr, (obj, k, v)))
                # TODO: Consider using the state argument to save_reduce?
            if qualname is not None:
                postproc_list.append((setattr, (obj, '__qualname__', qualname)))

            if not hasattr(obj, '__orig_bases__'):
                _save_with_postproc(pickler, (_create_type, (
                    type(obj), obj.__name__, obj.__bases__, _dict
                )), obj=obj, postproc_list=postproc_list)
            else:
                # This case will always work, but might be overkill.
                # Generic classes: rebuild through types.new_class so that
                # __orig_bases__/__mro_entries__ resolution is redone.
                _metadict = {
                    'metaclass': type(obj)
                }

                if _dict:
                    _dict_update = PartialType(_setitems, source=_dict)
                else:
                    _dict_update = None

                _save_with_postproc(pickler, (new_class, (
                    obj.__name__, obj.__orig_bases__, _metadict, _dict_update
                )), obj=obj, postproc_list=postproc_list)
            logger.trace(pickler, "# T2")
        else:
            obj_name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
            logger.trace(pickler, "T4: %s", obj)
            if incorrectly_named:
                warnings.warn(
                    "Cannot locate reference to %r." % (obj,),
                    PicklingWarning,
                    stacklevel=3,
                )
            if obj_recursive:
                warnings.warn(
                    "Cannot pickle %r: %s.%s has recursive self-references that "
                    "trigger a RecursionError." % (obj, obj.__module__, obj_name),
                    PicklingWarning,
                    stacklevel=3,
                )
            #print (obj.__dict__)
            #print ("%s\n%s" % (type(obj), obj.__name__))
            #print ("%s\n%s" % (obj.__bases__, obj.__dict__))
            StockPickler.save_global(pickler, obj, name=obj_name)
            logger.trace(pickler, "# T4")
    return
@register(property)
@register(abc.abstractproperty)
def save_property(pickler, obj):
    """Pickle a property by reducing it to its accessor triple plus docstring."""
    logger.trace(pickler, "Pr: %s", obj)
    # property(fget, fset, fdel, doc) recreates an equivalent descriptor.
    accessors = (obj.fget, obj.fset, obj.fdel, obj.__doc__)
    pickler.save_reduce(type(obj), accessors, obj=obj)
    logger.trace(pickler, "# Pr")
@register(staticmethod)
@register(classmethod)
@register(abc.abstractstaticmethod)
@register(abc.abstractclassmethod)
def save_classmethod(pickler, obj):
    """Pickle staticmethod/classmethod wrappers by rewrapping the plain function."""
    logger.trace(pickler, "Cm: %s", obj)
    # Rebuild the wrapper of the same type around the function it decorates;
    # any __dict__ state on the wrapper itself is intentionally not saved.
    pickler.save_reduce(type(obj), (obj.__func__,), obj=obj)
    logger.trace(pickler, "# Cm")
@register(FunctionType)
def save_function(pickler, obj):
    """Pickle a Python function.

    F1: functions that cannot be located by name are serialized by value
        (code object, globals mapping, name, defaults, closure, state).
    F2: locatable functions are saved by reference via save_global.
    F3: PyPy builtin-function wrappers are saved as getattr(found, '__func__').
    """
    if not _locate_function(obj, pickler):
        if type(obj.__code__) is not CodeType:
            # Some PyPy builtin functions have no module name, and thus are not
            # able to be located
            module_name = getattr(obj, '__module__', None)
            if module_name is None:
                module_name = __builtin__.__name__
            module = _import_module(module_name, safe=True)
            _pypy_builtin = False
            try:
                found, _ = _getattribute(module, obj.__qualname__)
                if getattr(found, '__func__', None) is obj:
                    _pypy_builtin = True
            except AttributeError:
                pass

            if _pypy_builtin:
                logger.trace(pickler, "F3: %s", obj)
                pickler.save_reduce(getattr, (found, '__func__'), obj=obj)
                logger.trace(pickler, "# F3")
                return

        logger.trace(pickler, "F1: %s", obj)
        _recurse = getattr(pickler, '_recurse', None)
        _postproc = getattr(pickler, '_postproc', None)
        _main_modified = getattr(pickler, '_main_modified', None)
        _original_main = getattr(pickler, '_original_main', __builtin__)#'None'
        postproc_list = []
        if _recurse:
            # recurse to get all globals referred to by obj
            from .detect import globalvars
            globs_copy = globalvars(obj, recurse=True, builtin=True)

            # Add the name of the module to the globs dictionary to prevent
            # the duplication of the dictionary. Pickle the unpopulated
            # globals dictionary and set the remaining items after the function
            # is created to correctly handle recursion.
            globs = {'__name__': obj.__module__}
        else:
            globs_copy = obj.__globals__

            # If the globals is the __dict__ from the module being saved as a
            # session, substitute it by the dictionary being actually saved.
            if _main_modified and globs_copy is _original_main.__dict__:
                globs_copy = getattr(pickler, '_main', _original_main).__dict__
                globs = globs_copy
            # If the globals is a module __dict__, do not save it in the pickle.
            elif globs_copy is not None and obj.__module__ is not None and \
                    getattr(_import_module(obj.__module__, True), '__dict__', None) is globs_copy:
                globs = globs_copy
            else:
                globs = {'__name__': obj.__module__}

        if globs_copy is not None and globs is not globs_copy:
            # In the case that the globals are copied, we need to ensure that
            # the globals dictionary is updated when all objects in the
            # dictionary are already created.
            glob_ids = {id(g) for g in globs_copy.values()}
            for stack_element in _postproc:
                if stack_element in glob_ids:
                    _postproc[stack_element].append((_setitems, (globs, globs_copy)))
                    break
            else:
                postproc_list.append((_setitems, (globs, globs_copy)))

        # Collect function attributes worth restoring after reconstruction.
        closure = obj.__closure__
        state_dict = {}
        for fattrname in ('__doc__', '__kwdefaults__', '__annotations__'):
            fattr = getattr(obj, fattrname, None)
            if fattr is not None:
                state_dict[fattrname] = fattr
        if obj.__qualname__ != obj.__name__:
            state_dict['__qualname__'] = obj.__qualname__
        if '__name__' not in globs or obj.__module__ != globs['__name__']:
            state_dict['__module__'] = obj.__module__

        # A non-dict __dict__ (custom mapping) is carried inside state_dict.
        state = obj.__dict__
        if type(state) is not dict:
            state_dict['__dict__'] = state
            state = None
        if state_dict:
            state = state, state_dict

        _save_with_postproc(pickler, (_create_function, (
            obj.__code__, globs, obj.__name__, obj.__defaults__,
            closure
        ), state), obj=obj, postproc_list=postproc_list)

        # Lift closure cell update to earliest function (#458)
        if _postproc:
            topmost_postproc = next(iter(_postproc.values()), None)
            if closure and topmost_postproc:
                for cell in closure:
                    possible_postproc = (setattr, (cell, 'cell_contents', obj))
                    try:
                        topmost_postproc.remove(possible_postproc)
                    except ValueError:
                        continue

                    # Change the value of the cell
                    pickler.save_reduce(*possible_postproc)
                    # pop None created by calling preprocessing step off stack
                    pickler.write(POP)

        logger.trace(pickler, "# F1")
    else:
        logger.trace(pickler, "F2: %s", obj)
        name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
        StockPickler.save_global(pickler, obj, name=name)
        logger.trace(pickler, "# F2")
    return
# Bind the CPython PyCapsule C-API through ctypes so capsule objects can be
# taken apart and rebuilt during (un)pickling.  Only runs when ctypes exposes
# 'pythonapi' (i.e. on CPython).
if HAS_CTYPES and hasattr(ctypes, 'pythonapi'):
    # Declare argtypes/restype for each C function so ctypes marshals
    # pointers and strings correctly instead of guessing int.
    _PyCapsule_New = ctypes.pythonapi.PyCapsule_New
    _PyCapsule_New.argtypes = (ctypes.c_void_p, ctypes.c_char_p, ctypes.c_void_p)
    _PyCapsule_New.restype = ctypes.py_object
    _PyCapsule_GetPointer = ctypes.pythonapi.PyCapsule_GetPointer
    _PyCapsule_GetPointer.argtypes = (ctypes.py_object, ctypes.c_char_p)
    _PyCapsule_GetPointer.restype = ctypes.c_void_p
    _PyCapsule_GetDestructor = ctypes.pythonapi.PyCapsule_GetDestructor
    _PyCapsule_GetDestructor.argtypes = (ctypes.py_object,)
    _PyCapsule_GetDestructor.restype = ctypes.c_void_p
    _PyCapsule_GetContext = ctypes.pythonapi.PyCapsule_GetContext
    _PyCapsule_GetContext.argtypes = (ctypes.py_object,)
    _PyCapsule_GetContext.restype = ctypes.c_void_p
    _PyCapsule_GetName = ctypes.pythonapi.PyCapsule_GetName
    _PyCapsule_GetName.argtypes = (ctypes.py_object,)
    _PyCapsule_GetName.restype = ctypes.c_char_p
    _PyCapsule_IsValid = ctypes.pythonapi.PyCapsule_IsValid
    _PyCapsule_IsValid.argtypes = (ctypes.py_object, ctypes.c_char_p)
    _PyCapsule_IsValid.restype = ctypes.c_bool
    _PyCapsule_SetContext = ctypes.pythonapi.PyCapsule_SetContext
    _PyCapsule_SetContext.argtypes = (ctypes.py_object, ctypes.c_void_p)
    _PyCapsule_SetDestructor = ctypes.pythonapi.PyCapsule_SetDestructor
    _PyCapsule_SetDestructor.argtypes = (ctypes.py_object, ctypes.c_void_p)
    _PyCapsule_SetName = ctypes.pythonapi.PyCapsule_SetName
    _PyCapsule_SetName.argtypes = (ctypes.py_object, ctypes.c_char_p)
    _PyCapsule_SetPointer = ctypes.pythonapi.PyCapsule_SetPointer
    _PyCapsule_SetPointer.argtypes = (ctypes.py_object, ctypes.c_void_p)
    #from _socket import CAPI as _testcapsule
    # Create a throwaway capsule purely to obtain the (otherwise unexposed)
    # PyCapsule type object.
    _testcapsule_name = b'dill._dill._testcapsule'
    _testcapsule = _PyCapsule_New(
        ctypes.cast(_PyCapsule_New, ctypes.c_void_p),
        ctypes.c_char_p(_testcapsule_name),
        None
    )
    PyCapsuleType = type(_testcapsule)
    @register(PyCapsuleType)
    def save_capsule(pickler, obj):
        """Pickle a PyCapsule by extracting its pointer, name, context and
        destructor.  NOTE(review): the raw pointer is only meaningful inside
        the originating process — unpickling elsewhere is inherently unsafe."""
        logger.trace(pickler, "Cap: %s", obj)
        name = _PyCapsule_GetName(obj)
        #warnings.warn('Pickling a PyCapsule (%s) does not pickle any C data structures and could cause segmentation faults or other memory errors when unpickling.' % (name,), PicklingWarning)
        pointer = _PyCapsule_GetPointer(obj, name)
        context = _PyCapsule_GetContext(obj)
        destructor = _PyCapsule_GetDestructor(obj)
        pickler.save_reduce(_create_capsule, (pointer, name, context, destructor), obj=obj)
        logger.trace(pickler, "# Cap")
    _incedental_reverse_typemap['PyCapsuleType'] = PyCapsuleType
    _reverse_typemap['PyCapsuleType'] = PyCapsuleType
    _incedental_types.add(PyCapsuleType)
else:
    # No CPython C-API available (e.g. other implementations).
    _testcapsule = None
2094#############################
2095# A quick fix for issue #500
2096# This should be removed when a better solution is found.
if hasattr(dataclasses, "_HAS_DEFAULT_FACTORY_CLASS"):
    @register(dataclasses._HAS_DEFAULT_FACTORY_CLASS)
    def save_dataclasses_HAS_DEFAULT_FACTORY_CLASS(pickler, obj):
        """Pickle the dataclasses _HAS_DEFAULT_FACTORY sentinel by global reference."""
        logger.trace(pickler, "DcHDF: %s", obj)
        # Emit a GLOBAL opcode pointing at the module-level singleton.
        opcode = GLOBAL + b"dataclasses\n_HAS_DEFAULT_FACTORY\n"
        pickler.write(opcode)
        logger.trace(pickler, "# DcHDF")
if hasattr(dataclasses, "MISSING"):
    @register(type(dataclasses.MISSING))
    def save_dataclasses_MISSING_TYPE(pickler, obj):
        """Pickle the dataclasses.MISSING sentinel by global reference."""
        logger.trace(pickler, "DcM: %s", obj)
        # Emit a GLOBAL opcode pointing at the module-level singleton.
        opcode = GLOBAL + b"dataclasses\nMISSING\n"
        pickler.write(opcode)
        logger.trace(pickler, "# DcM")
if hasattr(dataclasses, "KW_ONLY"):
    @register(type(dataclasses.KW_ONLY))
    def save_dataclasses_KW_ONLY_TYPE(pickler, obj):
        """Pickle the dataclasses.KW_ONLY sentinel by global reference."""
        logger.trace(pickler, "DcKWO: %s", obj)
        # Emit a GLOBAL opcode pointing at the module-level singleton.
        opcode = GLOBAL + b"dataclasses\nKW_ONLY\n"
        pickler.write(opcode)
        logger.trace(pickler, "# DcKWO")
if hasattr(dataclasses, "_FIELD_BASE"):
    @register(dataclasses._FIELD_BASE)
    def save_dataclasses_FIELD_BASE(pickler, obj):
        """Pickle dataclasses field-kind sentinels (_FIELD, _FIELD_CLASSVAR, ...) by name."""
        logger.trace(pickler, "DcFB: %s", obj)
        # Each sentinel's 'name' attribute matches its module-level binding.
        opcode = GLOBAL + b"dataclasses\n" + obj.name.encode() + b"\n"
        pickler.write(opcode)
        logger.trace(pickler, "# DcFB")
2126#############################
2128# quick sanity checking
2129def pickles(obj,exact=False,safe=False,**kwds):
2130 """
2131 Quick check if object pickles with dill.
2133 If *exact=True* then an equality test is done to check if the reconstructed
2134 object matches the original object.
2136 If *safe=True* then any exception will raised in copy signal that the
2137 object is not picklable, otherwise only pickling errors will be trapped.
2139 Additional keyword arguments are as :func:`dumps` and :func:`loads`.
2140 """
2141 if safe: exceptions = (Exception,) # RuntimeError, ValueError
2142 else:
2143 exceptions = (TypeError, AssertionError, NotImplementedError, PicklingError, UnpicklingError)
2144 try:
2145 pik = copy(obj, **kwds)
2146 #FIXME: should check types match first, then check content if "exact"
2147 try:
2148 #FIXME: should be "(pik == obj).all()" for numpy comparison, though that'll fail if shapes differ
2149 result = bool(pik.all() == obj.all())
2150 except (AttributeError, TypeError):
2151 warnings.filterwarnings('ignore') #FIXME: be specific
2152 result = pik == obj
2153 if warnings.filters: del warnings.filters[0]
2154 if hasattr(result, 'toarray'): # for unusual types like sparse matrix
2155 result = result.toarray().all()
2156 if result: return True
2157 if not exact:
2158 result = type(pik) == type(obj)
2159 if result: return result
2160 # class instances might have been dumped with byref=False
2161 return repr(type(pik)) == repr(type(obj)) #XXX: InstanceType?
2162 return False
2163 except exceptions:
2164 return False
def check(obj, *args, **kwds):
    """
    Check pickling of an object across another process.

    *python* is the path to the python interpreter (defaults to sys.executable)

    Set *verbose=True* to print the unpickled object in the other process.

    Additional keyword arguments are as :func:`dumps` and :func:`loads`.
    """
    # == undocumented ==
    # python -- the string path or executable name of the selected python
    # verbose -- if True, be verbose about printing warning messages
    # all other args and kwds are passed to dill.dumps #FIXME: ignore on load
    verbose = kwds.pop('verbose', False)
    python = kwds.pop('python', None)
    if python is None:
        import sys
        python = sys.executable
    # BUGFIX: the old bare "isinstance(python, str)" discarded its result;
    # enforce the intended type check instead.
    if not isinstance(python, str):
        raise TypeError('python interpreter path must be a string: %r' % (python,))
    import subprocess
    fail = True
    try:
        _obj = dumps(obj, *args, **kwds)
        fail = False
    finally:
        if fail and verbose:
            print("DUMP FAILED")
    # BUGFIX: pass the command as an argument list so an interpreter path that
    # contains spaces is handled correctly (the old code split a single
    # string, breaking on such paths).
    #FIXME: 'ignore' kwarg is applied on dump but not on load
    cmd = [python, "-c", "import dill; print(dill.loads(%s))" % repr(_obj)]
    msg = "SUCCESS" if not subprocess.call(cmd) else "LOAD FAILED"
    if verbose:
        print(msg)
    return
# use to protect against missing attributes
def is_dill(pickler, child=None):
    "check the dill-ness of your pickler"
    # With child truthy/None on a new-style pickler, accept any subclass of
    # dill's Pickler; with child=False (or an mro-less class) fall back to a
    # module-name test.
    if child is not False and hasattr(pickler.__class__, 'mro'):
        return Pickler in pickler.__class__.mro()
    return 'dill' in pickler.__module__
def _extend():
    """extend pickle with all of dill's registered types"""
    # need to have pickle not choke on _main_module? use is_dill(pickler)
    for t,func in Pickler.dispatch.items():
        try:
            StockPickler.dispatch[t] = func
        except Exception: #TypeError, PicklingError, UnpicklingError
            # BUGFIX: the old handler called logger.trace(pickler, ...) where
            # 'pickler' is undefined in this scope, so any failure here became
            # a NameError; skip unregistrable entries instead.
            continue
    return
# drop the diff helpers from the module namespace; they are not public API
del diff, _use_diff, use_diff

# EOF