Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.10/site-packages/dill/_dill.py: 41%
Shortcuts on this page
r m x toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
Shortcuts on this page
r m x toggle line displays
j k next/prev highlighted chunk
0 (zero) top of page
1 (one) first highlighted chunk
1# -*- coding: utf-8 -*-
2#
3# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
4# Copyright (c) 2008-2015 California Institute of Technology.
5# Copyright (c) 2016-2025 The Uncertainty Quantification Foundation.
6# License: 3-clause BSD. The full license text is available at:
7# - https://github.com/uqfoundation/dill/blob/master/LICENSE
8"""
9dill: a utility for serialization of python objects
11The primary functions in `dill` are :func:`dump` and
12:func:`dumps` for serialization ("pickling") to a
13file or to a string, respectively, and :func:`load`
14and :func:`loads` for deserialization ("unpickling"),
15similarly, from a file or from a string. Other notable
16functions are :func:`~dill.dump_module` and
17:func:`~dill.load_module`, which are used to save and
18restore module objects, including an interpreter session.
20Based on code written by Oren Tirosh and Armin Ronacher.
21Extended to a (near) full set of the builtin types (in types module),
22and coded to the pickle interface, by <mmckerns@caltech.edu>.
23Initial port to python3 by Jonathan Dobson, continued by mmckerns.
24Tested against "all" python types (Std. Lib. CH 1-15 @ 2.7) by mmckerns.
25Tested against CH16+ Std. Lib. ... TBD.
26"""
28from __future__ import annotations
30__all__ = [
31 'dump','dumps','load','loads','copy',
32 'Pickler','Unpickler','register','pickle','pickles','check',
33 'DEFAULT_PROTOCOL','HIGHEST_PROTOCOL','HANDLE_FMODE','CONTENTS_FMODE','FILE_FMODE',
34 'PickleError','PickleWarning','PicklingError','PicklingWarning','UnpicklingError',
35 'UnpicklingWarning',
36]
38__module__ = 'dill'
40import warnings
41from .logger import adapter as logger
42from .logger import trace as _trace
43log = logger # backward compatibility (see issue #582)
45import os
46import sys
47diff = None
48_use_diff = False
49OLD38 = (sys.hexversion < 0x3080000)
50OLD39 = (sys.hexversion < 0x3090000)
51OLD310 = (sys.hexversion < 0x30a0000)
52OLD312a7 = (sys.hexversion < 0x30c00a7)
53#XXX: get types from .objtypes ?
54import builtins as __builtin__
55from pickle import _Pickler as StockPickler, Unpickler as StockUnpickler
56from pickle import GLOBAL, POP
57from _contextvars import Context as ContextType
58from _thread import LockType
59from _thread import RLock as RLockType
60try:
61 from _thread import _ExceptHookArgs as ExceptHookArgsType
62except ImportError:
63 ExceptHookArgsType = None
64try:
65 from _thread import _ThreadHandle as ThreadHandleType
66except ImportError:
67 ThreadHandleType = None
68#from io import IOBase
69from types import CodeType, FunctionType, MethodType, GeneratorType, \
70 TracebackType, FrameType, ModuleType, BuiltinMethodType
71BufferType = memoryview #XXX: unregistered
72ClassType = type # no 'old-style' classes
73EllipsisType = type(Ellipsis)
74#FileType = IOBase
75NotImplementedType = type(NotImplemented)
76SliceType = slice
77TypeType = type # 'new-style' classes #XXX: unregistered
78XRangeType = range
79from types import MappingProxyType as DictProxyType, new_class
80from pickle import DEFAULT_PROTOCOL, HIGHEST_PROTOCOL, PickleError, PicklingError, UnpicklingError
81import __main__ as _main_module
82import marshal
83import gc
84# import zlib
85import abc
86import dataclasses
87from weakref import ReferenceType, ProxyType, CallableProxyType
88from collections import OrderedDict
89from enum import Enum, EnumMeta
90from functools import partial
91from operator import itemgetter, attrgetter
92GENERATOR_FAIL = False
93import importlib.machinery
94EXTENSION_SUFFIXES = tuple(importlib.machinery.EXTENSION_SUFFIXES)
95try:
96 import ctypes
97 HAS_CTYPES = True
98 # if using `pypy`, pythonapi is not found
99 IS_PYPY = not hasattr(ctypes, 'pythonapi')
100except ImportError:
101 HAS_CTYPES = False
102 IS_PYPY = False
103NumpyUfuncType = None
104NumpyDType = None
105NumpyArrayType = None
106try:
107 if not importlib.machinery.PathFinder().find_spec('numpy'):
108 raise ImportError("No module named 'numpy'")
109 NumpyUfuncType = True
110 NumpyDType = True
111 NumpyArrayType = True
112except ImportError:
113 pass
def __hook__():
    """Lazily import numpy and bind its core classes into module globals.

    Until this runs, NumpyUfuncType/NumpyArrayType/NumpyDType are just
    truthy sentinels; afterwards they are the real numpy classes.  The
    import is deferred so dill never pulls in numpy unless a numpy
    object is actually encountered.  Returns True on success.
    """
    global NumpyArrayType, NumpyDType, NumpyUfuncType
    import numpy
    NumpyUfuncType = numpy.ufunc
    NumpyArrayType = numpy.ndarray
    NumpyDType = numpy.dtype
    return True
if NumpyArrayType: # then has numpy
    # NOTE: until __hook__() runs, NumpyArrayType/NumpyDType are the sentinel
    # value True (set above when the numpy spec was found), not real classes.
    def ndarraysubclassinstance(obj_type):
        # True only for ndarray subclasses that still use the stock
        # ndarray reduce methods (i.e. define no custom pickling).
        if all((c.__module__, c.__name__) != ('numpy', 'ndarray') for c in obj_type.__mro__):
            return False
        # anything below here is a numpy array (or subclass) instance
        __hook__() # import numpy (so the following works!!!)
        # verify that __reduce__ has not been overridden
        if obj_type.__reduce_ex__ is not NumpyArrayType.__reduce_ex__ \
        or obj_type.__reduce__ is not NumpyArrayType.__reduce__:
            return False
        return True
    def numpyufunc(obj_type):
        # True if numpy.ufunc appears anywhere in the type's MRO
        return any((c.__module__, c.__name__) == ('numpy', 'ufunc') for c in obj_type.__mro__)
    def numpydtype(obj_type):
        # True only for the dtype metatype itself (see comment below)
        if all((c.__module__, c.__name__) != ('numpy', 'dtype') for c in obj_type.__mro__):
            return False
        # anything below here is a numpy dtype
        __hook__() # import numpy (so the following works!!!)
        return obj_type is type(NumpyDType) # handles subclasses
else:
    # numpy is not importable: every predicate is trivially False
    def ndarraysubclassinstance(obj): return False
    def numpyufunc(obj): return False
    def numpydtype(obj): return False
144from types import GetSetDescriptorType, ClassMethodDescriptorType, \
145 WrapperDescriptorType, MethodDescriptorType, MemberDescriptorType, \
146 MethodWrapperType #XXX: unused
148# make sure to add these 'hand-built' types to _typemap
149CellType = type((lambda x: lambda y: x)(0).__closure__[0])
150PartialType = type(partial(int, base=2))
151SuperType = type(super(Exception, TypeError()))
152ItemGetterType = type(itemgetter(0))
153AttrGetterType = type(attrgetter('__repr__'))
155try:
156 from functools import _lru_cache_wrapper as LRUCacheType
157except ImportError:
158 LRUCacheType = None
160if not isinstance(LRUCacheType, type):
161 LRUCacheType = None
def get_file_type(*args, **kwargs):
    """Return the concrete type of a file object opened with the given options.

    Opens ``os.devnull`` with *args*/*kwargs*, records the handle's type, and
    closes it immediately.  An alternate opener can be supplied through the
    *open* keyword (defaults to the builtin ``open``); this is how the pure
    Python ``_pyio`` file types are probed as well.
    """
    opener = kwargs.pop("open", __builtin__.open)
    handle = opener(os.devnull, *args, **kwargs)
    file_type = type(handle)
    handle.close()
    return file_type
170IS_PYODIDE = sys.platform == 'emscripten'
172FileType = get_file_type('rb', buffering=0)
173TextWrapperType = get_file_type('r', buffering=-1)
174BufferedRandomType = None if IS_PYODIDE else get_file_type('r+b', buffering=-1)
175BufferedReaderType = get_file_type('rb', buffering=-1)
176BufferedWriterType = get_file_type('wb', buffering=-1)
177try:
178 from _pyio import open as _open
179 PyTextWrapperType = get_file_type('r', buffering=-1, open=_open)
180 PyBufferedRandomType = None if IS_PYODIDE else get_file_type('r+b', buffering=-1, open=_open)
181 PyBufferedReaderType = get_file_type('rb', buffering=-1, open=_open)
182 PyBufferedWriterType = get_file_type('wb', buffering=-1, open=_open)
183except ImportError:
184 PyTextWrapperType = PyBufferedRandomType = PyBufferedReaderType = PyBufferedWriterType = None
185from io import BytesIO as StringIO
186InputType = OutputType = None
187from socket import socket as SocketType
188#FIXME: additionally calls ForkingPickler.register several times
189from multiprocessing.reduction import _reduce_socket as reduce_socket
190try: #pragma: no cover
191 IS_IPYTHON = __IPYTHON__ # is True
192 ExitType = None # IPython.core.autocall.ExitAutocall
193 IPYTHON_SINGLETONS = ('exit', 'quit', 'get_ipython')
194except NameError:
195 IS_IPYTHON = False
196 try: ExitType = type(exit) # apparently 'exit' can be removed
197 except NameError: ExitType = None
198 IPYTHON_SINGLETONS = ()
200import inspect
201import typing
204### Shims for different versions of Python and dill
class Sentinel(object):
    """A unique placeholder object that pickles as a named constant.

    ``__reduce__`` returns just the bare name, so pickling stores a global
    reference and unpickling yields the very same module-level object.
    Copying (shallow or deep) also returns the identical instance.
    """
    def __init__(self, name, module_name=None):
        self.name = name
        if module_name is not None:
            self.__module__ = module_name # pragma: no cover
        else:
            # default to the caller's module, so the pickled global
            # reference resolves back to the defining module
            caller = inspect.currentframe().f_back
            self.__module__ = caller.f_globals['__name__']
    def __repr__(self):
        return '%s.%s' % (self.__module__, self.name) # pragma: no cover
    def __copy__(self):
        return self # pragma: no cover
    def __deepcopy__(self, memo):
        return self # pragma: no cover
    def __reduce__(self):
        # pickle by (global) name only
        return self.name
    def __reduce_ex__(self, protocol):
        return self.name
227from . import _shims
228from ._shims import Reduce, Getattr
230### File modes
231#: Pickles the file handle, preserving mode. The position of the unpickled
232#: object is as for a new file handle.
233HANDLE_FMODE = 0
234#: Pickles the file contents, creating a new file if on load the file does
235#: not exist. The position = min(pickled position, EOF) and mode is chosen
236#: as such that "best" preserves behavior of the original file.
237CONTENTS_FMODE = 1
238#: Pickles the entire file (handle and contents), preserving mode and position.
239FILE_FMODE = 2
241### Shorthands (modified from python2.5/lib/pickle.py)
def copy(obj, *args, **kwds):
    """
    Use pickling to 'copy' an object (i.e. `loads(dumps(obj))`).

    See :func:`dumps` and :func:`loads` for keyword arguments.
    """
    # 'ignore' belongs to the unpickling side; strip it before dumping
    ignore = kwds.pop('ignore', Unpickler.settings['ignore'])
    pickled = dumps(obj, *args, **kwds)
    return loads(pickled, ignore=ignore)
def dump(obj, file, protocol=None, byref=None, fmode=None, recurse=None, **kwds):#, strictio=None):
    """
    Pickle an object to a file.

    See :func:`dumps` for keyword arguments.
    """
    from .settings import settings
    if protocol is None:
        protocol = settings['protocol']
    else:
        protocol = int(protocol)
    # forward the named options alongside any extra Pickler keywords
    pickler_kwds = dict(kwds, byref=byref, fmode=fmode, recurse=recurse)
    Pickler(file, protocol, **pickler_kwds).dump(obj)
def dumps(obj, protocol=None, byref=None, fmode=None, recurse=None, **kwds):#, strictio=None):
    """
    Pickle an object to a string.

    *protocol* is the pickler protocol, as defined for Python *pickle*.

    If *byref=True*, then dill behaves a lot more like pickle as certain
    objects (like modules) are pickled by reference as opposed to attempting
    to pickle the object itself.

    If *recurse=True*, then objects referred to in the global dictionary
    are recursively traced and pickled, instead of the default behavior
    of attempting to store the entire global dictionary.  This is needed for
    functions defined via *exec()*.

    *fmode* (:const:`HANDLE_FMODE`, :const:`CONTENTS_FMODE`,
    or :const:`FILE_FMODE`) indicates how file handles will be pickled.
    For example, when pickling a data file handle for transfer to a remote
    compute service, *FILE_FMODE* will include the file contents in the
    pickle and cursor position so that a remote method can operate
    transparently on an object with an open file handle.

    Default values for keyword arguments can be set in :mod:`dill.settings`.
    """
    buffer = StringIO()  # NOTE: StringIO is an alias of io.BytesIO here
    dump(obj, buffer, protocol, byref, fmode, recurse, **kwds)#, strictio)
    return buffer.getvalue()
def load(file, ignore=None, **kwds):
    """
    Unpickle an object from a file.

    See :func:`loads` for keyword arguments.
    """
    unpickler = Unpickler(file, ignore=ignore, **kwds)
    return unpickler.load()
def loads(str, ignore=None, **kwds):
    """
    Unpickle an object from a string.

    If *ignore=False* then objects whose class is defined in the module
    *__main__* are updated to reference the existing class in *__main__*,
    otherwise they are left to refer to the reconstructed type, which may
    be different.

    Default values for keyword arguments can be set in :mod:`dill.settings`.
    """
    # NOTE: the parameter name shadows the builtin `str`; it is kept
    # unchanged for backward compatibility with keyword callers.
    buffer = StringIO(str)
    return load(buffer, ignore, **kwds)
314# def dumpzs(obj, protocol=None):
315# """pickle an object to a compressed string"""
316# return zlib.compress(dumps(obj, protocol))
318# def loadzs(str):
319# """unpickle an object from a compressed string"""
320# return loads(zlib.decompress(str))
322### End: Shorthands ###
class MetaCatchingDict(dict):
    """A dict whose missing-key lookup falls back to ``save_type``.

    Used as the Pickler dispatch table: any key that is itself a subclass
    of ``type`` (i.e. a metaclass) is served by the ``save_type`` handler
    instead of raising.
    """
    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default

    def __missing__(self, key):
        # only metaclasses get the save_type fallback
        if not issubclass(key, type):
            raise KeyError()
        return save_type
class PickleWarning(Warning, PickleError):
    """Base warning for dill; also a PickleError so it can be raised or caught as one."""
    pass
class PicklingWarning(PickleWarning, PicklingError):
    """Warning emitted for recoverable problems while pickling."""
    pass
class UnpicklingWarning(PickleWarning, UnpicklingError):
    """Warning emitted for recoverable problems while unpickling."""
    pass
346### Extend the Picklers
class Pickler(StockPickler):
    """python's Pickler extended to interpreter sessions"""
    dispatch: typing.Dict[type, typing.Callable[[Pickler, typing.Any], None]] \
        = MetaCatchingDict(StockPickler.dispatch.copy())
    """The dispatch table, a dictionary of serializing functions used
    by Pickler to save objects of specific types.  Use :func:`pickle`
    or :func:`register` to associate types to custom functions.

    :meta hide-value:
    """
    # NOTE(review): _session appears to flag interpreter-session dumps; it is
    # toggled elsewhere (not visible in this chunk) — confirm before relying on it.
    _session = False
    from .settings import settings

    def __init__(self, file, *args, **kwds):
        # Pop dill-specific options before handing the rest to the stock
        # pickler; unset options fall back to dill.settings defaults.
        settings = Pickler.settings
        _byref = kwds.pop('byref', None)
        #_strictio = kwds.pop('strictio', None)
        _fmode = kwds.pop('fmode', None)
        _recurse = kwds.pop('recurse', None)
        StockPickler.__init__(self, file, *args, **kwds)
        self._main = _main_module
        self._diff_cache = {}
        self._byref = settings['byref'] if _byref is None else _byref
        self._strictio = False #_strictio
        self._fmode = settings['fmode'] if _fmode is None else _fmode
        self._recurse = settings['recurse'] if _recurse is None else _recurse
        self._postproc = OrderedDict()
        self._file = file

    def save(self, obj, save_persistent_id=True):
        # numpy hack: lazily register dispatch entries for numpy types the
        # first time an instance of each is seen, so numpy is only imported
        # (via the predicates' __hook__) when actually needed.
        obj_type = type(obj)
        if NumpyArrayType and not (obj_type is type or obj_type in Pickler.dispatch):
            # register if the object is a numpy ufunc
            # thanks to Paul Kienzle for pointing out ufuncs didn't pickle
            if numpyufunc(obj_type):
                @register(obj_type)
                def save_numpy_ufunc(pickler, obj):
                    logger.trace(pickler, "Nu: %s", obj)
                    name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
                    StockPickler.save_global(pickler, obj, name=name)
                    logger.trace(pickler, "# Nu")
                    return
                # NOTE: the above 'save' performs like:
                #   import copy_reg
                #   def udump(f): return f.__name__
                #   def uload(name): return getattr(numpy, name)
                #   copy_reg.pickle(NumpyUfuncType, udump, uload)
            # register if the object is a numpy dtype
            if numpydtype(obj_type):
                @register(obj_type)
                def save_numpy_dtype(pickler, obj):
                    logger.trace(pickler, "Dt: %s", obj)
                    pickler.save_reduce(_create_dtypemeta, (obj.type,), obj=obj)
                    logger.trace(pickler, "# Dt")
                    return
                # NOTE: the above 'save' performs like:
                #   import copy_reg
                #   def uload(name): return type(NumpyDType(name))
                #   def udump(f): return uload, (f.type,)
                #   copy_reg.pickle(NumpyDTypeType, udump, uload)
            # register if the object is a subclassed numpy array instance
            if ndarraysubclassinstance(obj_type):
                @register(obj_type)
                def save_numpy_array(pickler, obj):
                    logger.trace(pickler, "Nu: (%s, %s)", obj.shape, obj.dtype)
                    npdict = getattr(obj, '__dict__', None)
                    f, args, state = obj.__reduce__()
                    pickler.save_reduce(_create_array, (f,args,state,npdict), obj=obj)
                    logger.trace(pickler, "# Nu")
                    return
        # end numpy hack

        if GENERATOR_FAIL and obj_type is GeneratorType:
            msg = "Can't pickle %s: attribute lookup builtins.generator failed" % GeneratorType
            raise PicklingError(msg)
        StockPickler.save(self, obj, save_persistent_id)
    save.__doc__ = StockPickler.save.__doc__

    def dump(self, obj): #NOTE: if settings change, need to update attributes
        logger.trace_setup(self)
        StockPickler.dump(self, obj)
    dump.__doc__ = StockPickler.dump.__doc__
class Unpickler(StockUnpickler):
    """python's Unpickler extended to interpreter sessions and more types"""
    from .settings import settings
    # NOTE(review): _session appears to flag session loads; set elsewhere — confirm.
    _session = False

    def find_class(self, module, name):
        # Remap special/historical global names before the stock lookup.
        if (module, name) == ('__builtin__', '__main__'):
            return self._main.__dict__ #XXX: above set w/save_module_dict
        elif (module, name) == ('__builtin__', 'NoneType'):
            return type(None) #XXX: special case: NoneType missing
        # pickles written by old dill used the pre-rename module path
        if module == 'dill.dill': module = 'dill._dill'
        return StockUnpickler.find_class(self, module, name)

    def __init__(self, *args, **kwds):
        # NOTE(review): reads Pickler.settings (shared dill settings), not a
        # separate Unpickler-specific table — intentional, but worth confirming.
        settings = Pickler.settings
        _ignore = kwds.pop('ignore', None)
        StockUnpickler.__init__(self, *args, **kwds)
        self._main = _main_module
        self._ignore = settings['ignore'] if _ignore is None else _ignore

    def load(self): #NOTE: if settings change, need to update attributes
        obj = StockUnpickler.load(self)
        # If the object claims to come from __main__ and ignore=False,
        # rebind its class to the existing class in __main__ (if any).
        if type(obj).__module__ == getattr(_main_module, '__name__', '__main__'):
            if not self._ignore:
                # point obj class to main
                try: obj.__class__ = getattr(self._main, type(obj).__name__)
                except (AttributeError,TypeError): pass # defined in a file
        #_main_module.__dict__.update(obj.__dict__) #XXX: should update globals ?
        return obj
    load.__doc__ = StockUnpickler.load.__doc__
    pass
'''
def dispatch_table():
    """get the dispatch table of registered types"""
    return Pickler.dispatch
'''

# Snapshot of the stock pickle dispatch table taken before dill registers its
# own entries, so _revert_extension can restore the original handlers.
pickle_dispatch_copy = StockPickler.dispatch.copy()
def pickle(t, func):
    """expose :attr:`~Pickler.dispatch` table for user-created extensions"""
    # NOTE: intentionally shadows the stdlib module name within dill's API
    Pickler.dispatch[t] = func
def register(t):
    """decorator to register types to Pickler's :attr:`~Pickler.dispatch` table"""
    def wrapper(func):
        # install the handler and hand the function back unchanged
        Pickler.dispatch[t] = func
        return func
    return wrapper
def _revert_extension():
    """Drop dill-registered types from pickle's dispatch table.

    Walks the stock pickler's dispatch table, removes every handler that
    this module installed, and restores the original stock handler for any
    type that had one (using the snapshot in ``pickle_dispatch_copy``).
    """
    # fix: loop variable renamed from 'type', which shadowed the builtin
    for cls, func in list(StockPickler.dispatch.items()):
        # only touch entries that dill itself installed
        if func.__module__ == __name__:
            del StockPickler.dispatch[cls]
            if cls in pickle_dispatch_copy:
                StockPickler.dispatch[cls] = pickle_dispatch_copy[cls]
def use_diff(on=True):
    """
    Reduces size of pickles by only including object which have changed.

    Decreases pickle size but increases CPU time needed.
    Also helps avoid some unpickleable objects.
    MUST be called at start of script, otherwise changes will not be recorded.
    """
    global _use_diff, diff
    _use_diff = on
    if not (_use_diff and diff is None):
        return
    # lazily bind the diff module the first time diffing is enabled
    try:
        from . import diff as _diff_module
    except ImportError:
        import diff as _diff_module
    diff = _diff_module
def _create_typemap():
    """Yield (name, class) pairs for the builtin classes.

    Merges the namespaces of ``builtins`` and the ``types`` module (entries
    from ``types`` override same-named builtins, as in the original dict
    merge) and keeps only plain classes defined in the 'builtins' module.
    """
    import types
    merged = dict(__builtin__.__dict__)
    merged.update(types.__dict__)
    for name, value in merged.items():
        if type(value) is type and getattr(value, '__module__', None) == 'builtins':
            yield name, value
# Map dill's type names back to the actual classes, starting from the
# builtin classes and adding the 'hand-built' types defined above.
_reverse_typemap = dict(_create_typemap())
_reverse_typemap.update({
    'PartialType': PartialType,
    'SuperType': SuperType,
    'ItemGetterType': ItemGetterType,
    'AttrGetterType': AttrGetterType,
})
if sys.hexversion < 0x30800a2:
    # CellType only became a public types-module name in 3.8.0a2
    _reverse_typemap.update({
        'CellType': CellType,
    })

# "Incidental" implementation specific types. Unpickling these types in another
# implementation of Python (PyPy -> CPython) is not guaranteed to work

# This dictionary should contain all types that appear in Python implementations
# but are not defined in https://docs.python.org/3/library/types.html#standard-interpreter-types
x=OrderedDict()  # throwaway instance, used only to probe the odict view types
_incedental_reverse_typemap = {
    'FileType': FileType,
    'BufferedRandomType': BufferedRandomType,
    'BufferedReaderType': BufferedReaderType,
    'BufferedWriterType': BufferedWriterType,
    'TextWrapperType': TextWrapperType,
    'PyBufferedRandomType': PyBufferedRandomType,
    'PyBufferedReaderType': PyBufferedReaderType,
    'PyBufferedWriterType': PyBufferedWriterType,
    'PyTextWrapperType': PyTextWrapperType,
}

_incedental_reverse_typemap.update({
    "DictKeysType": type({}.keys()),
    "DictValuesType": type({}.values()),
    "DictItemsType": type({}.items()),

    "OdictKeysType": type(x.keys()),
    "OdictValuesType": type(x.values()),
    "OdictItemsType": type(x.items()),
})

if ExitType:
    _incedental_reverse_typemap['ExitType'] = ExitType
if InputType:
    _incedental_reverse_typemap['InputType'] = InputType
    _incedental_reverse_typemap['OutputType'] = OutputType

'''
try:
    import symtable
    _incedental_reverse_typemap["SymtableEntryType"] = type(symtable.symtable("", "string", "exec")._table)
except: #FIXME: fails to pickle
    pass

if sys.hexversion >= 0x30a00a0:
    _incedental_reverse_typemap['LineIteratorType'] = type(compile('3', '', 'eval').co_lines())
'''

if sys.hexversion >= 0x30b00b0 and not IS_PYPY:
    from types import GenericAlias
    _incedental_reverse_typemap["GenericAliasIteratorType"] = type(iter(GenericAlias(list, (int,))))
    '''
    _incedental_reverse_typemap['PositionsIteratorType'] = type(compile('3', '', 'eval').co_positions())
    '''

try:
    import winreg
    _incedental_reverse_typemap["HKEYType"] = winreg.HKEYType
except ImportError:
    pass

_reverse_typemap.update(_incedental_reverse_typemap)
_incedental_types = set(_incedental_reverse_typemap.values())

del x

# forward map: class -> dill name
_typemap = dict((v, k) for k, v in _reverse_typemap.items())
def _unmarshal(string):
    """Reconstruct an object from a ``marshal`` byte string (inverse of marshal.dumps)."""
    return marshal.loads(string)
def _load_type(name):
    """Look up a class by its dill type name in ``_reverse_typemap``."""
    return _reverse_typemap[name]
def _create_type(typeobj, *args):
    """Instantiate ``typeobj(*args)``; used on load to rebuild classes and instances."""
    return typeobj(*args)
def _create_function(fcode, fglobals, fname=None, fdefaults=None,
                     fclosure=None, fdict=None, fkwdefaults=None):
    """Rebuild a function from its parts.

    Same as calling FunctionType, but additionally restores attributes
    added after creation (*fdict*), keyword-only defaults (*fkwdefaults*),
    and guarantees __builtins__ is present in the function's globals.
    """
    namespace = fglobals if fglobals else dict()
    func = FunctionType(fcode, namespace, fname, fdefaults, fclosure)
    if fdict is not None:
        func.__dict__.update(fdict) #XXX: better copy? option to copy?
    if fkwdefaults is not None:
        func.__kwdefaults__ = fkwdefaults
    # 'recurse' only stores referenced modules/objects in fglobals,
    # thus we need to make sure that we have __builtins__ as well
    if "__builtins__" not in func.__globals__:
        func.__globals__["__builtins__"] = globals()["__builtins__"]
    # assert id(fglobals) == id(func.__globals__)
    return func
class match:
    """
    Make available a limited structural pattern matching-like syntax for Python < 3.10

    Patterns can be only tuples (without types) currently.
    Inspired by the package pattern-matching-PEP634.

    Usage:
    >>> with match(args) as m:
    >>>     if m.case(('x', 'y')):
    >>>         # use m.x and m.y
    >>>     elif m.case(('x', 'y', 'z')):
    >>>         # use m.x, m.y and m.z

    Equivalent native code for Python >= 3.10:
    >>> match args:
    >>>     case (x, y):
    >>>         # use x and y
    >>>     case (x, y, z):
    >>>         # use x, y and z
    """
    def __init__(self, value):
        self.value = value
        self._fields = None

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        return False

    def case(self, args):
        """just handles tuple patterns"""
        if len(args) != len(self.value):
            return False
        self.args = args
        return True

    @property
    def fields(self):
        # bind the pattern names to values lazily, on first attribute access
        if self._fields is None:
            self._fields = dict(zip(self.args, self.value))
        return self._fields

    def __getattr__(self, item):
        return self.fields[item]
# CodeType constructor signatures by interpreter version/implementation.
# Entries are ordered newest-first; each is identified by a probe attribute
# that the version introduced.  ('a' = 3.11 alpha layout, 'p' = PyPy layout.)
ALL_CODE_PARAMS = [
    # Version     New attribute         CodeType parameters
    ((3,11,'a'), 'co_endlinetable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable endlinetable columntable exceptiontable freevars cellvars'),
    ((3,11), 'co_exceptiontable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable exceptiontable freevars cellvars'),
    ((3,11,'p'), 'co_qualname', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable freevars cellvars'),
    ((3,10), 'co_linetable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno linetable freevars cellvars'),
    ((3,8), 'co_posonlyargcount', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno lnotab freevars cellvars'),
    ((3,7), 'co_kwonlyargcount', 'argcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno lnotab freevars cellvars'),
    ]
# Detect the running interpreter's CodeType signature: the first entry whose
# probe attribute exists on CodeType wins.
for version, new_attr, params in ALL_CODE_PARAMS:
    if hasattr(CodeType, new_attr):
        CODE_VERSION = version
        CODE_PARAMS = params.split()
        break
# Fields that may arrive as str in a pickle and must be re-encoded to bytes
# when rebuilding the code object (see _create_code).
ENCODE_PARAMS = set(CODE_PARAMS).intersection(
    ['code', 'lnotab', 'linetable', 'endlinetable', 'columntable', 'exceptiontable'])
def _create_code(*args):
    """Rebuild a code object from pickled fields, across interpreter versions.

    The pickle may have been produced by a different Python version than the
    one now loading it.  Each known field layout is tried via ``match``; if
    the layout matches the running interpreter's own CodeType signature, the
    code object is built directly.  Otherwise the named fields are collected
    and translated (defaults filled in, byte fields re-encoded) to fit the
    local CodeType constructor.
    """
    if not isinstance(args[0], int): # co_lnotab stored from >= 3.10
        LNOTAB, *args = args
    else: # from < 3.10 (or pre-LNOTAB storage)
        LNOTAB = b''

    with match(args) as m:
        # Python 3.11/3.12a (18 members)
        if m.case((
            'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname', 'firstlineno', # args[6:14]
            'linetable', 'exceptiontable', 'freevars', 'cellvars' # args[14:]
        )):
            if CODE_VERSION == (3,11):
                return CodeType(
                    *args[:6],
                    args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
                    *args[7:14],
                    args[14].encode() if hasattr(args[14], 'encode') else args[14], # linetable
                    args[15].encode() if hasattr(args[15], 'encode') else args[15], # exceptiontable
                    args[16],
                    args[17],
                )
            fields = m.fields
        # PyPy 3.11 7.3.19+ (17 members)
        elif m.case((
            'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname', # args[6:13]
            'firstlineno', 'linetable', 'freevars', 'cellvars' # args[13:]
        )):
            if CODE_VERSION == (3,11,'p'):
                return CodeType(
                    *args[:6],
                    args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
                    *args[7:14],
                    args[14].encode() if hasattr(args[14], 'encode') else args[14], # linetable
                    args[15],
                    args[16],
                )
            fields = m.fields
        # Python 3.10 or 3.8/3.9 (16 members)
        elif m.case((
            'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'firstlineno', # args[6:13]
            'LNOTAB_OR_LINETABLE', 'freevars', 'cellvars' # args[13:]
        )):
            if CODE_VERSION == (3,10) or CODE_VERSION == (3,8):
                return CodeType(
                    *args[:6],
                    args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
                    *args[7:13],
                    args[13].encode() if hasattr(args[13], 'encode') else args[13], # lnotab/linetable
                    args[14],
                    args[15],
                )
            fields = m.fields
            # field 13 is lnotab on <=3.9 and linetable on >=3.10
            if CODE_VERSION >= (3,10):
                fields['linetable'] = m.LNOTAB_OR_LINETABLE
            else:
                fields['lnotab'] = LNOTAB if LNOTAB else m.LNOTAB_OR_LINETABLE
        # Python 3.7 (15 args)
        elif m.case((
            'argcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:5]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'firstlineno', # args[5:12]
            'lnotab', 'freevars', 'cellvars' # args[12:]
        )):
            if CODE_VERSION == (3,7):
                return CodeType(
                    *args[:5],
                    args[5].encode() if hasattr(args[5], 'encode') else args[5], # code
                    *args[6:12],
                    args[12].encode() if hasattr(args[12], 'encode') else args[12], # lnotab
                    args[13],
                    args[14],
                )
            fields = m.fields
        # Python 3.11a (20 members)
        elif m.case((
            'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname', 'firstlineno', # args[6:14]
            'linetable', 'endlinetable', 'columntable', 'exceptiontable', 'freevars', 'cellvars' # args[14:]
        )):
            if CODE_VERSION == (3,11,'a'):
                return CodeType(
                    *args[:6],
                    args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
                    *args[7:14],
                    *(a.encode() if hasattr(a, 'encode') else a for a in args[14:18]), # linetable-exceptiontable
                    args[18],
                    args[19],
                )
            fields = m.fields
        else:
            raise UnpicklingError("pattern match for code object failed")

    # The pickled layout doesn't match the running interpreter's: fill in
    # defaults for fields the source version didn't have, then build the
    # local CodeType argument list in CODE_PARAMS order.
    fields.setdefault('posonlyargcount', 0)          # from python <= 3.7
    fields.setdefault('lnotab', LNOTAB)              # from python >= 3.10
    fields.setdefault('linetable', b'')              # from python <= 3.9
    fields.setdefault('qualname', fields['name'])    # from python <= 3.10
    fields.setdefault('exceptiontable', b'')         # from python <= 3.10
    fields.setdefault('endlinetable', None)          # from python != 3.11a
    fields.setdefault('columntable', None)           # from python != 3.11a

    args = (fields[k].encode() if k in ENCODE_PARAMS and hasattr(fields[k], 'encode') else fields[k]
            for k in CODE_PARAMS)
    return CodeType(*args)
def _create_ftype(ftypeobj, func, args, kwds):
    """Rebuild a functools.partial-like object from (func, args, kwds).

    ``None`` for *args*/*kwds* is treated as empty.
    """
    if args is None:
        args = ()
    if kwds is None:
        kwds = {}
    return ftypeobj(func, *args, **kwds)
def _create_typing_tuple(argz, *args): #NOTE: workaround python/cpython#94245
    """Rebuild a typing.Tuple[...] alias, handling the empty-tuple special cases."""
    if argz == ((),):
        # Tuple[()] -- the empty-tuple type
        return typing.Tuple[()]
    if argz:
        return typing.Tuple[argz]
    # bare/unparameterized Tuple
    return typing.Tuple[()].copy_with(())
if ThreadHandleType:
    def _create_thread_handle(ident, done, *args): #XXX: ignores 'blocking'
        """Rebuild a thread handle for *ident* via threading's private
        _make_thread_handle, marking it done if it was pickled that way.
        Only defined when _thread._ThreadHandle exists on this interpreter."""
        from threading import _make_thread_handle
        handle = _make_thread_handle(ident)
        if done:
            handle._set_done()
        return handle
def _create_lock(locked, *args): #XXX: ignores 'blocking'
    """Rebuild a threading.Lock, re-acquiring it if it was pickled locked."""
    from threading import Lock
    new_lock = Lock()
    if locked and not new_lock.acquire(False):
        raise UnpicklingError("Cannot acquire lock")
    return new_lock
def _create_rlock(count, owner, *args): #XXX: ignores 'blocking'
    """Rebuild an RLock, restoring its recursion count and owning thread.

    Fails with UnpicklingError when the pickled owner cannot be restored
    as the current owner of the new lock.
    """
    new_lock = RLockType()
    if owner is not None:
        new_lock._acquire_restore((count, owner))
    if owner and not new_lock._is_owned():
        raise UnpicklingError("Cannot acquire lock")
    return new_lock
# thanks to matsjoyce for adding all the different file modes
def _create_filehandle(name, mode, position, closed, open, strictio, fmode, fdata): # buffering=0
    """Restore a pickled file handle.

    Behavior depends on *fmode*: HANDLE_FMODE restores just the handle,
    CONTENTS_FMODE preserves existing on-disk contents (clamping position),
    FILE_FMODE rewrites the file from the pickled *fdata*.  *strictio*
    turns missing-file / bad-position situations into errors instead of
    best-effort fallbacks.  *open* is the opener captured at pickle time.
    """
    # only pickles the handle, not the file contents... good? or StringIO(data)?
    # (for file contents see: http://effbot.org/librarybook/copy-reg.htm)
    # NOTE: handle special cases first (are there more special cases?)
    names = {'<stdin>':sys.__stdin__, '<stdout>':sys.__stdout__,
             '<stderr>':sys.__stderr__} #XXX: better fileno=(0,1,2) ?
    if name in list(names.keys()):
        f = names[name] #XXX: safer "f=sys.stdin"
    elif name == '<tmpfile>':
        f = os.tmpfile()
    elif name == '<fdopen>':
        import tempfile
        f = tempfile.TemporaryFile(mode)
    else:
        try:
            exists = os.path.exists(name)
        except Exception:
            exists = False
        if not exists:
            if strictio:
                raise FileNotFoundError("[Errno 2] No such file or directory: '%s'" % name)
            elif "r" in mode and fmode != FILE_FMODE:
                name = '<fdopen>' # or os.devnull?
            current_size = 0 # or maintain position?
        else:
            current_size = os.path.getsize(name)

        if position > current_size:
            if strictio:
                raise ValueError("invalid buffer size")
            elif fmode == CONTENTS_FMODE:
                # clamp the cursor to the actual end of file
                position = current_size
        # try to open the file by name
        # NOTE: has different fileno
        try:
            #FIXME: missing: *buffering*, encoding, softspace
            if fmode == FILE_FMODE:
                # rewrite the file from the pickled contents, then reopen
                # in the original mode if that mode was not a write mode
                f = open(name, mode if "w" in mode else "w")
                f.write(fdata)
                if "w" not in mode:
                    f.close()
                    f = open(name, mode)
            elif name == '<fdopen>': # file did not exist
                import tempfile
                f = tempfile.TemporaryFile(mode)
            # treat x mode as w mode
            elif fmode == CONTENTS_FMODE \
               and ("w" in mode or "x" in mode):
                # stop truncation when opening
                flags = os.O_CREAT
                if "+" in mode:
                    flags |= os.O_RDWR
                else:
                    flags |= os.O_WRONLY
                f = os.fdopen(os.open(name, flags), mode)
                # set name to the correct value
                r = getattr(f, "buffer", f)
                r = getattr(r, "raw", r)
                r.name = name
                assert f.name == name
            else:
                f = open(name, mode)
        except (IOError, FileNotFoundError):
            err = sys.exc_info()[1]
            raise UnpicklingError(err)
    if closed:
        f.close()
    elif position >= 0 and fmode != HANDLE_FMODE:
        f.seek(position)
    return f
900def _create_stringi(value, position, closed):
901 f = StringIO(value)
902 if closed: f.close()
903 else: f.seek(position)
904 return f
906def _create_stringo(value, position, closed):
907 f = StringIO()
908 if closed: f.close()
909 else:
910 f.write(value)
911 f.seek(position)
912 return f
914class _itemgetter_helper(object):
915 def __init__(self):
916 self.items = []
917 def __getitem__(self, item):
918 self.items.append(item)
919 return
921class _attrgetter_helper(object):
922 def __init__(self, attrs, index=None):
923 self.attrs = attrs
924 self.index = index
925 def __getattribute__(self, attr):
926 attrs = object.__getattribute__(self, "attrs")
927 index = object.__getattribute__(self, "index")
928 if index is None:
929 index = len(attrs)
930 attrs.append(attr)
931 else:
932 attrs[index] = ".".join([attrs[index], attr])
933 return type(self)(attrs, index)
935class _dictproxy_helper(dict):
936 def __ror__(self, a):
937 return a
# shared instance used by the mappingproxy unwrapping trick
_dictproxy_helper_instance = _dictproxy_helper()

# Probe whether the trick works on this interpreter: DictProxyType(__d) | helper
# should hand back the very dict __d wrapped by the proxy.
__d = {}
try:
    # In CPython 3.9 and later, this trick can be used to exploit the
    # implementation of the __or__ function of MappingProxyType to get the true
    # mapping referenced by the proxy. It may work for other implementations,
    # but is not guaranteed.
    MAPPING_PROXY_TRICK = __d is (DictProxyType(__d) | _dictproxy_helper_instance)
except Exception:
    MAPPING_PROXY_TRICK = False
del __d

# _CELL_REF and _CELL_EMPTY are used to stay compatible with versions of dill
# whose _create_cell functions do not have a default value.
# _CELL_REF can be safely removed entirely (replaced by empty tuples for calls
# to _create_cell) once breaking changes are allowed.
_CELL_REF = None
_CELL_EMPTY = Sentinel('_CELL_EMPTY')
def _create_cell(contents=None):
    """Make a closure cell holding *contents*; the sentinel _CELL_EMPTY
    yields an empty cell."""
    if contents is not _CELL_EMPTY:
        cell_value = contents
    # if cell_value was never bound above, the captured cell stays empty
    return (lambda: cell_value).__closure__[0]
964def _create_weakref(obj, *args):
965 from weakref import ref
966 if obj is None: # it's dead
967 from collections import UserDict
968 return ref(UserDict(), *args)
969 return ref(obj, *args)
971def _create_weakproxy(obj, callable=False, *args):
972 from weakref import proxy
973 if obj is None: # it's dead
974 if callable: return proxy(lambda x:x, *args)
975 from collections import UserDict
976 return proxy(UserDict(), *args)
977 return proxy(obj, *args)
979def _eval_repr(repr_str):
980 return eval(repr_str)
982def _create_array(f, args, state, npdict=None):
983 #array = numpy.core.multiarray._reconstruct(*args)
984 array = f(*args)
985 array.__setstate__(state)
986 if npdict is not None: # we also have saved state in __dict__
987 array.__dict__.update(npdict)
988 return array
def _create_dtypemeta(scalar_type):
    """Recover the numpy dtype metaclass, or a concrete dtype subclass."""
    if NumpyDType is True:
        __hook__() # lazily resolve the real numpy dtype type (a bit hacky)
    if scalar_type is None:
        return NumpyDType
    return type(NumpyDType(scalar_type))
def _create_namedtuple(name, fieldnames, modulename, defaults=None):
    """Return the original namedtuple class if it can be imported, else
    remake an equivalent class."""
    existing = _import_module(modulename + '.' + name, safe=True)
    if existing is not None:
        return existing
    import collections
    return collections.namedtuple(name, fieldnames, defaults=defaults, module=modulename)
def _create_capsule(pointer, name, context, destructor):
    """Restore a PyCapsule: prefer an existing capsule importable under
    *name* (a dotted bytes path); otherwise wrap the raw *pointer* anew."""
    attr_found = False
    try:
        # based on https://github.com/python/cpython/blob/f4095e53ab708d95e019c909d5928502775ba68f/Objects/capsule.c#L209-L231
        uname = name.decode('utf8')
        # try progressively shorter module prefixes: a.b.c -> (a.b, c), (a, b.c)...
        for i in range(1, uname.count('.')+1):
            names = uname.rsplit('.', i)
            try:
                module = __import__(names[0])
            except ImportError:
                pass
            # NOTE(review): if the first __import__ fails, 'module' may be
            # unbound here; the outer except swallows the NameError
            obj = module
            for attr in names[1:]:
                obj = getattr(obj, attr)
            capsule = obj
            attr_found = True
            break
    except Exception:
        pass

    if attr_found:
        if _PyCapsule_IsValid(capsule, name):
            return capsule
        raise UnpicklingError("%s object exists at %s but a PyCapsule object was expected." % (type(capsule), name))
    else:
        #warnings.warn('Creating a new PyCapsule %s for a C data structure that may not be present in memory. Segmentation faults or other memory errors are possible.' % (name,), UnpicklingWarning)
        capsule = _PyCapsule_New(pointer, name, destructor)
        _PyCapsule_SetContext(capsule, context)
        return capsule
def _getattr(objclass, name, repr_str):
    """Fetch descriptor *name* from *objclass*, preferring the exact object
    referenced in *repr_str* (e.g. "<slot wrapper '__add__' of 'int' objects>")."""
    # hack to grab the reference directly
    try: #XXX: works only for __builtin__ ?
        # repr_str's 4th quote-delimited field is the owning type's name
        attr = repr_str.split("'")[3]
        return eval(attr+'.__dict__["'+name+'"]')
    except Exception:
        try:
            attr = objclass.__dict__
            if type(attr) is DictProxyType:
                # 0x30f00a0 is 3.15.0a0; __weakref__/__dict__ need the helper there
                if sys.hexversion > 0x30f00a0 and name in ('__weakref__','__dict__'):
                    attr = _dictproxy_helper.__dict__[name]
                else:
                    attr = attr[name]
            else:
                attr = getattr(objclass,name)
        except (AttributeError, KeyError):
            # final fallback: ordinary (possibly inherited) attribute lookup
            attr = getattr(objclass,name)
    return attr
1053def _get_attr(self, name):
1054 # stop recursive pickling
1055 return getattr(self, name, None) or getattr(__builtin__, name)
1057def _import_module(import_name, safe=False):
1058 try:
1059 if import_name.startswith('__runtime__.'):
1060 return sys.modules[import_name]
1061 elif '.' in import_name:
1062 items = import_name.split('.')
1063 module = '.'.join(items[:-1])
1064 obj = items[-1]
1065 submodule = getattr(__import__(module, None, None, [obj]), obj)
1066 if isinstance(submodule, (ModuleType, type)):
1067 return submodule
1068 return __import__(import_name, None, None, [obj])
1069 else:
1070 return __import__(import_name)
1071 except (ImportError, AttributeError, KeyError):
1072 if safe:
1073 return None
1074 raise
1076# https://github.com/python/cpython/blob/a8912a0f8d9eba6d502c37d522221f9933e976db/Lib/pickle.py#L322-L333
1077def _getattribute(obj, name):
1078 for subpath in name.split('.'):
1079 if subpath == '<locals>':
1080 raise AttributeError("Can't get local attribute {!r} on {!r}"
1081 .format(name, obj))
1082 try:
1083 parent = obj
1084 obj = getattr(obj, subpath)
1085 except AttributeError:
1086 raise AttributeError("Can't get attribute {!r} on {!r}"
1087 .format(name, obj))
1088 return obj, parent
def _locate_function(obj, pickler=None):
    """Return True if *obj* can be found by importing its qualified name,
    i.e. it is safe to pickle by reference."""
    module_name = getattr(obj, '__module__', None)
    # objects from __main__ (or the session's main module) must be pickled by value
    if module_name in ['__main__', None] or \
            pickler and is_dill(pickler, child=False) and pickler._session and module_name == pickler._main.__name__:
        return False

    if hasattr(obj, '__qualname__'):
        module = _import_module(module_name, safe=True)
        try:
            # the import must resolve to the *same* object, not a namesake
            found, _ = _getattribute(module, obj.__qualname__)
            return found is obj
        except AttributeError:
            return False
    else:
        found = _import_module(module_name + '.' + obj.__name__, safe=True)
        return found is obj
1107def _setitems(dest, source):
1108 for k, v in source.items():
1109 dest[k] = v
def _save_with_postproc(pickler, reduction, is_pickler_dill=None, obj=Getattr.NO_DEFAULT, postproc_list=None):
    """Save *obj* via ``pickler.save_reduce(*reduction)``, then replay any
    postponed sub-pickles ("postproc") registered for it while it was being
    saved.  Falls back to a global reference when *obj* turns out to be
    recursively self-referential."""
    if obj is Getattr.NO_DEFAULT:
        obj = Reduce(reduction) # pragma: no cover

    if is_pickler_dill is None:
        is_pickler_dill = is_dill(pickler, child=True)
    if is_pickler_dill:
        # assert id(obj) not in pickler._postproc, str(obj) + ' already pushed on stack!'
        # if not hasattr(pickler, 'x'): pickler.x = 0
        # print(pickler.x*' ', 'push', obj, id(obj), pickler._recurse)
        # pickler.x += 1
        if postproc_list is None:
            postproc_list = []

        # Recursive object not supported. Default to a global instead.
        if id(obj) in pickler._postproc:
            name = '%s.%s ' % (obj.__module__, getattr(obj, '__qualname__', obj.__name__)) if hasattr(obj, '__module__') else ''
            warnings.warn('Cannot pickle %r: %shas recursive self-references that trigger a RecursionError.' % (obj, name), PicklingWarning)
            pickler.save_global(obj)
            return
        # register the pending postprocessing steps for this object
        pickler._postproc[id(obj)] = postproc_list

    # TODO: Use state_setter in Python 3.8 to allow for faster cPickle implementations
    pickler.save_reduce(*reduction, obj=obj)

    if is_pickler_dill:
        # pickler.x -= 1
        # print(pickler.x*' ', 'pop', obj, id(obj))
        postproc = pickler._postproc.pop(id(obj))
        # assert postproc_list == postproc, 'Stack tampered!'
        # replay in reverse so later-registered fixups run first
        for reduction in reversed(postproc):
            if reduction[0] is _setitems:
                # use the internal machinery of pickle.py to speedup when
                # updating a dictionary in postproc
                dest, source = reduction[1]
                if source:
                    pickler.write(pickler.get(pickler.memo[id(dest)][0]))
                    if sys.hexversion < 0x30e00a1:
                        pickler._batch_setitems(iter(source.items()))
                    else:
                        # 3.14.0a1+: _batch_setitems grew an 'obj' argument
                        pickler._batch_setitems(iter(source.items()), obj=obj)
                else:
                    # Updating with an empty dictionary. Same as doing nothing.
                    continue
            else:
                pickler.save_reduce(*reduction)
            # pop None created by calling preprocessing step off stack
            pickler.write(POP)
1161#@register(CodeType)
1162#def save_code(pickler, obj):
1163# logger.trace(pickler, "Co: %s", obj)
1164# pickler.save_reduce(_unmarshal, (marshal.dumps(obj),), obj=obj)
1165# logger.trace(pickler, "# Co")
1166# return
1168# The following function is based on 'save_codeobject' from 'cloudpickle'
1169# Copyright (c) 2012, Regents of the University of California.
1170# Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
1171# License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE
@register(CodeType)
def save_code(pickler, obj):
    """Pickle a code object by packing its co_* fields into an argument
    tuple for _create_code; which fields exist depends on the interpreter
    version, detected by feature (hasattr) rather than version number."""
    logger.trace(pickler, "Co: %s", obj)
    if hasattr(obj, "co_endlinetable"): # python 3.11a (20 args)
        args = (
            obj.co_lnotab, # for < python 3.10 [not counted in args]
            obj.co_argcount, obj.co_posonlyargcount,
            obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
            obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
            obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname,
            obj.co_firstlineno, obj.co_linetable, obj.co_endlinetable,
            obj.co_columntable, obj.co_exceptiontable, obj.co_freevars,
            obj.co_cellvars
        )
    elif hasattr(obj, "co_exceptiontable"): # python 3.11 (18 args)
        with warnings.catch_warnings():
            if not OLD312a7: # issue 597: co_lnotab access warns on 3.12.0a7+
                warnings.filterwarnings('ignore', category=DeprecationWarning)
            args = (
                obj.co_lnotab, # for < python 3.10 [not counted in args]
                obj.co_argcount, obj.co_posonlyargcount,
                obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
                obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
                obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname,
                obj.co_firstlineno, obj.co_linetable, obj.co_exceptiontable,
                obj.co_freevars, obj.co_cellvars
            )
    elif hasattr(obj, "co_qualname"): # pypy 3.11 7.3.19+ (17 args)
        args = (
            obj.co_lnotab, obj.co_argcount, obj.co_posonlyargcount,
            obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
            obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
            obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname,
            obj.co_firstlineno, obj.co_linetable, obj.co_freevars,
            obj.co_cellvars
        )
    elif hasattr(obj, "co_linetable"): # python 3.10 (16 args)
        args = (
            obj.co_lnotab, # for < python 3.10 [not counted in args]
            obj.co_argcount, obj.co_posonlyargcount,
            obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
            obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
            obj.co_varnames, obj.co_filename, obj.co_name,
            obj.co_firstlineno, obj.co_linetable, obj.co_freevars,
            obj.co_cellvars
        )
    elif hasattr(obj, "co_posonlyargcount"): # python 3.8 (16 args)
        args = (
            obj.co_argcount, obj.co_posonlyargcount,
            obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
            obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
            obj.co_varnames, obj.co_filename, obj.co_name,
            obj.co_firstlineno, obj.co_lnotab, obj.co_freevars,
            obj.co_cellvars
        )
    else: # python 3.7 (15 args)
        args = (
            obj.co_argcount, obj.co_kwonlyargcount, obj.co_nlocals,
            obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts,
            obj.co_names, obj.co_varnames, obj.co_filename,
            obj.co_name, obj.co_firstlineno, obj.co_lnotab,
            obj.co_freevars, obj.co_cellvars
        )

    pickler.save_reduce(_create_code, args, obj=obj)
    logger.trace(pickler, "# Co")
    return
1240def _repr_dict(obj):
1241 """Make a short string representation of a dictionary."""
1242 return "<%s object at %#012x>" % (type(obj).__name__, id(obj))
@register(dict)
def save_module_dict(pickler, obj):
    """Pickle a dict, special-casing module __dict__ objects so they are
    restored as a reference to the module rather than by value."""
    if is_dill(pickler, child=False) and obj == pickler._main.__dict__ and \
            not (pickler._session and pickler._first_pass):
        logger.trace(pickler, "D1: %s", _repr_dict(obj)) # obj
        # raw pickle opcodes: GLOBAL reference to __builtin__.__main__
        pickler.write(bytes('c__builtin__\n__main__\n', 'UTF-8'))
        logger.trace(pickler, "# D1")
    elif (not is_dill(pickler, child=False)) and (obj == _main_module.__dict__):
        logger.trace(pickler, "D3: %s", _repr_dict(obj)) # obj
        pickler.write(bytes('c__main__\n__dict__\n', 'UTF-8')) #XXX: works in general?
        logger.trace(pickler, "# D3")
    elif '__name__' in obj and obj != _main_module.__dict__ \
            and type(obj['__name__']) is str \
            and obj is getattr(_import_module(obj['__name__'],True), '__dict__', None):
        # the dict is some importable module's __dict__: reference it by name
        logger.trace(pickler, "D4: %s", _repr_dict(obj)) # obj
        pickler.write(bytes('c%s\n__dict__\n' % obj['__name__'], 'UTF-8'))
        logger.trace(pickler, "# D4")
    else:
        logger.trace(pickler, "D2: %s", _repr_dict(obj)) # obj
        if is_dill(pickler, child=False) and pickler._session:
            # we only care about session the first pass thru
            pickler._first_pass = False
        StockPickler.save_dict(pickler, obj)
        logger.trace(pickler, "# D2")
    return
if not OLD310 and MAPPING_PROXY_TRICK:
    # dict views expose .mapping (a mappingproxy, 3.10+); the proxy trick
    # recovers the real dict so the view can be rebuilt on the same mapping.
    def save_dict_view(dicttype):
        def save_dict_view_for_function(func):
            def _save_dict_view(pickler, obj):
                logger.trace(pickler, "Dkvi: <%s>", obj)
                mapping = obj.mapping | _dictproxy_helper_instance
                pickler.save_reduce(func, (mapping,), obj=obj)
                logger.trace(pickler, "# Dkvi")
            return _save_dict_view
        return [
            (funcname, save_dict_view_for_function(getattr(dicttype, funcname)))
            for funcname in ('keys', 'values', 'items')
        ]
else:
    # The following functions are based on 'cloudpickle'
    # https://github.com/cloudpipe/cloudpickle/blob/5d89947288a18029672596a4d719093cc6d5a412/cloudpickle/cloudpickle.py#L922-L940
    # Copyright (c) 2012, Regents of the University of California.
    # Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
    # License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE
    def save_dict_view(dicttype):
        # fallback: rebuild an equivalent dict and take the requested view of
        # it (the original backing mapping's identity is not preserved)
        def save_dict_keys(pickler, obj):
            logger.trace(pickler, "Dk: <%s>", obj)
            dict_constructor = _shims.Reduce(dicttype.fromkeys, (list(obj),))
            pickler.save_reduce(dicttype.keys, (dict_constructor,), obj=obj)
            logger.trace(pickler, "# Dk")

        def save_dict_values(pickler, obj):
            logger.trace(pickler, "Dv: <%s>", obj)
            # values have no keys to recover; enumerate() invents index keys
            dict_constructor = _shims.Reduce(dicttype, (enumerate(obj),))
            pickler.save_reduce(dicttype.values, (dict_constructor,), obj=obj)
            logger.trace(pickler, "# Dv")

        def save_dict_items(pickler, obj):
            logger.trace(pickler, "Di: <%s>", obj)
            pickler.save_reduce(dicttype.items, (dicttype(obj),), obj=obj)
            logger.trace(pickler, "# Di")

        return (
            ('keys', save_dict_keys),
            ('values', save_dict_values),
            ('items', save_dict_items)
        )
# Register the view savers for dict and OrderedDict key/value/item views,
# skipping any view type another handler already covers.
for __dicttype in (
    dict,
    OrderedDict
):
    __obj = __dicttype()
    for __funcname, __savefunc in save_dict_view(__dicttype):
        # the view type is only reachable via an instance, e.g. type({}.keys())
        __tview = type(getattr(__obj, __funcname)())
        if __tview not in Pickler.dispatch:
            Pickler.dispatch[__tview] = __savefunc
del __dicttype, __obj, __funcname, __tview, __savefunc
@register(ClassType)
def save_classobj(pickler, obj): #FIXME: enable pickler._byref
    """Pickle a class: by value (name, bases, dict) when it cannot be
    located by import, otherwise by global reference."""
    if not _locate_function(obj, pickler):
        logger.trace(pickler, "C1: %s", obj)
        pickler.save_reduce(ClassType, (obj.__name__, obj.__bases__,
                                        obj.__dict__), obj=obj)
        #XXX: or obj.__dict__.copy()), obj=obj) ?
        logger.trace(pickler, "# C1")
    else:
        logger.trace(pickler, "C2: %s", obj)
        name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
        StockPickler.save_global(pickler, obj, name=name)
        logger.trace(pickler, "# C2")
    return
@register(typing._GenericAlias)
def save_generic_alias(pickler, obj):
    """Pickle a typing generic alias: by name when __reduce__ yields a
    string, through the Tuple[()] workaround for empty tuple aliases, and
    via the alias's own __reduce__ otherwise."""
    args = obj.__args__
    if type(obj.__reduce__()) is str:
        logger.trace(pickler, "Ga0: %s", obj)
        StockPickler.save_global(pickler, obj, name=obj.__reduce__())
        logger.trace(pickler, "# Ga0")
    elif obj.__origin__ is tuple and (not args or args == ((),)):
        # empty Tuple aliases need _create_typing_tuple (cpython#94245)
        logger.trace(pickler, "Ga1: %s", obj)
        pickler.save_reduce(_create_typing_tuple, (args,), obj=obj)
        logger.trace(pickler, "# Ga1")
    else:
        logger.trace(pickler, "Ga2: %s", obj)
        StockPickler.save_reduce(pickler, *obj.__reduce__(), obj=obj)
        logger.trace(pickler, "# Ga2")
    return
if ThreadHandleType:
    @register(ThreadHandleType)
    def save_thread_handle(pickler, obj):
        """Pickle a thread handle by its ident and done flag."""
        logger.trace(pickler, "Th: %s", obj)
        pickler.save_reduce(_create_thread_handle, (obj.ident, obj.is_done()), obj=obj)
        logger.trace(pickler, "# Th")
        return
@register(LockType) #XXX: copied Thread will have new Event (due to new Lock)
def save_lock(pickler, obj):
    """Pickle a threading.Lock by recording only its held/free state."""
    logger.trace(pickler, "Lo: %s", obj)
    pickler.save_reduce(_create_lock, (obj.locked(),), obj=obj)
    logger.trace(pickler, "# Lo")
@register(RLockType)
def save_rlock(pickler, obj):
    """Pickle an RLock by parsing count/owner out of its repr."""
    logger.trace(pickler, "RL: %s", obj)
    rep = obj.__repr__() # don't use _release_save as it unlocks the lock
    count_field = rep.split('count=')[1]
    count = int(count_field.split()[0].rstrip('>'))
    owner_field = rep.split('owner=')[1]
    owner = int(owner_field.split()[0])
    pickler.save_reduce(_create_rlock, (count,owner,), obj=obj)
    logger.trace(pickler, "# RL")
#@register(SocketType) #FIXME: causes multiprocess test_pickling FAIL
def save_socket(pickler, obj):
    """Pickle a socket via the stdlib socket reduction (not registered)."""
    logger.trace(pickler, "So: %s", obj)
    pickler.save_reduce(*reduce_socket(obj))
    logger.trace(pickler, "# So")
def _save_file(pickler, obj, open_):
    """Common reducer for file-like handles: captures name, mode, position,
    closed state and — only under FILE_FMODE — the file contents, then
    reduces to _create_filehandle."""
    if obj.closed:
        position = 0
    else:
        obj.flush()
        if obj in (sys.__stdout__, sys.__stderr__, sys.__stdin__):
            # std streams get a sentinel position (they are not repositioned)
            position = -1
        else:
            position = obj.tell()
    if is_dill(pickler, child=True) and pickler._fmode == FILE_FMODE:
        # FILE_FMODE: embed the current on-disk contents in the pickle
        f = open_(obj.name, "r")
        fdata = f.read()
        f.close()
    else:
        fdata = ""
    if is_dill(pickler, child=True):
        strictio = pickler._strictio
        fmode = pickler._fmode
    else:
        strictio = False
        fmode = 0 # HANDLE_FMODE
    pickler.save_reduce(_create_filehandle, (obj.name, obj.mode, position,
                                             obj.closed, open_, strictio,
                                             fmode, fdata), obj=obj)
    return
@register(FileType) #XXX: in 3.x has buffer=0, needs different _create?
@register(BufferedReaderType)
@register(BufferedWriterType)
@register(TextWrapperType)
def save_file(pickler, obj):
    """Pickle C-implemented file/buffer/text-wrapper handles via builtin open."""
    logger.trace(pickler, "Fi: %s", obj)
    result = _save_file(pickler, obj, open)
    logger.trace(pickler, "# Fi")
    return result
if BufferedRandomType:
    @register(BufferedRandomType)
    def save_file(pickler, obj):
        """Pickle buffered random-access handles via builtin open."""
        logger.trace(pickler, "Fi: %s", obj)
        result = _save_file(pickler, obj, open)
        logger.trace(pickler, "# Fi")
        return result
if PyTextWrapperType:
    @register(PyBufferedReaderType)
    @register(PyBufferedWriterType)
    @register(PyTextWrapperType)
    def save_file(pickler, obj):
        """Pickle pure-python io handles using the pure-python open (_open)."""
        logger.trace(pickler, "Fi: %s", obj)
        result = _save_file(pickler, obj, _open)
        logger.trace(pickler, "# Fi")
        return result

    if PyBufferedRandomType:
        @register(PyBufferedRandomType)
        def save_file(pickler, obj):
            """Pickle pure-python buffered random-access handles via _open."""
            logger.trace(pickler, "Fi: %s", obj)
            result = _save_file(pickler, obj, _open)
            logger.trace(pickler, "# Fi")
            return result
1454# The following two functions are based on 'saveCStringIoInput'
1455# and 'saveCStringIoOutput' from spickle
1456# Copyright (c) 2011 by science+computing ag
1457# License: http://www.apache.org/licenses/LICENSE-2.0
if InputType:
    @register(InputType)
    def save_stringi(pickler, obj):
        """Pickle a StringIO input object by capturing value and position."""
        logger.trace(pickler, "Io: %s", obj)
        if obj.closed:
            value, position = '', 0
        else:
            value, position = obj.getvalue(), obj.tell()
        pickler.save_reduce(_create_stringi, (value, position,
                                              obj.closed), obj=obj)
        logger.trace(pickler, "# Io")

    @register(OutputType)
    def save_stringo(pickler, obj):
        """Pickle a StringIO output object by capturing value and position."""
        logger.trace(pickler, "Io: %s", obj)
        if obj.closed:
            value, position = '', 0
        else:
            value, position = obj.getvalue(), obj.tell()
        pickler.save_reduce(_create_stringo, (value, position,
                                              obj.closed), obj=obj)
        logger.trace(pickler, "# Io")
if LRUCacheType is not None:
    from functools import lru_cache
    @register(LRUCacheType)
    def save_lru_cache(pickler, obj):
        """Pickle an lru_cache wrapper; the cached entries are not saved."""
        logger.trace(pickler, "LRU: %s", obj)
        if OLD39:
            # pre-3.9: only maxsize is recoverable (via cache_info)
            info = obj.cache_info()
            cache_args = (info.maxsize,)
        else:
            params = obj.cache_parameters()
            cache_args = (params['maxsize'], params['typed'])
        if cache_args == lru_cache.__defaults__:
            # default configuration: the plain decorator suffices
            factory = lru_cache
        else:
            factory = Reduce(lru_cache, cache_args, is_callable=True)
        pickler.save_reduce(factory, (obj.__wrapped__,), obj=obj)
        logger.trace(pickler, "# LRU")
@register(SuperType)
def save_super(pickler, obj):
    """Pickle a bound super() object from its class and instance."""
    logger.trace(pickler, "Su: %s", obj)
    pickler.save_reduce(super, (obj.__thisclass__, obj.__self__), obj=obj)
    logger.trace(pickler, "# Su")
if IS_PYPY:
    @register(MethodType)
    def save_instancemethod0(pickler, obj):
        """Pickle a bound method; on PyPy, builtin methods whose __code__ is
        not a real code object are saved as a getattr on their instance."""
        code = getattr(obj.__func__, '__code__', None)
        if code is not None and type(code) is not CodeType \
                and getattr(obj.__self__, obj.__name__) == obj:
            # Some PyPy builtin functions have no module name
            logger.trace(pickler, "Me2: %s", obj)
            # TODO: verify that this works for all PyPy builtin methods
            pickler.save_reduce(getattr, (obj.__self__, obj.__name__), obj=obj)
            logger.trace(pickler, "# Me2")
            return

        logger.trace(pickler, "Me1: %s", obj)
        pickler.save_reduce(MethodType, (obj.__func__, obj.__self__), obj=obj)
        logger.trace(pickler, "# Me1")
        return
else:
    @register(MethodType)
    def save_instancemethod0(pickler, obj):
        """Pickle a bound method from its function and instance."""
        logger.trace(pickler, "Me1: %s", obj)
        pickler.save_reduce(MethodType, (obj.__func__, obj.__self__), obj=obj)
        logger.trace(pickler, "# Me1")
        return
if not IS_PYPY:
    @register(MemberDescriptorType)
    @register(GetSetDescriptorType)
    @register(MethodDescriptorType)
    @register(WrapperDescriptorType)
    @register(ClassMethodDescriptorType)
    def save_wrapper_descriptor(pickler, obj):
        """Pickle C-level descriptors by re-fetching them via _getattr from
        their owning class (using the descriptor's repr as a hint)."""
        logger.trace(pickler, "Wr: %s", obj)
        pickler.save_reduce(_getattr, (obj.__objclass__, obj.__name__,
                                       obj.__repr__()), obj=obj)
        logger.trace(pickler, "# Wr")
        return
else:
    # PyPy exposes fewer distinct descriptor types
    @register(MemberDescriptorType)
    @register(GetSetDescriptorType)
    def save_wrapper_descriptor(pickler, obj):
        """Pickle descriptors by re-fetching them via _getattr (PyPy)."""
        logger.trace(pickler, "Wr: %s", obj)
        pickler.save_reduce(_getattr, (obj.__objclass__, obj.__name__,
                                       obj.__repr__()), obj=obj)
        logger.trace(pickler, "# Wr")
        return
@register(CellType)
def save_cell(pickler, obj):
    """Pickle a closure cell, handling empty cells and breaking reference
    cycles through the pickler's postproc machinery."""
    try:
        f = obj.cell_contents
    except ValueError: # cell is empty
        logger.trace(pickler, "Ce3: %s", obj)
        # _shims._CELL_EMPTY is defined in _shims.py to support PyPy 2.7.
        # It unpickles to a sentinel object _dill._CELL_EMPTY, also created in
        # _shims.py. This object is not present in Python 3 because the cell's
        # contents can be deleted in newer versions of Python. The reduce object
        # will instead unpickle to None if unpickled in Python 3.

        # When breaking changes are made to dill, (_shims._CELL_EMPTY,) can
        # be replaced by () OR the delattr function can be removed depending on
        # whichever is more convenient.
        pickler.save_reduce(_create_cell, (_shims._CELL_EMPTY,), obj=obj)
        # Call the function _delattr on the cell's cell_contents attribute
        # The result of this function call will be None
        pickler.save_reduce(_shims._delattr, (obj, 'cell_contents'))
        # pop None created by calling _delattr off stack
        pickler.write(POP)
        logger.trace(pickler, "# Ce3")
        return
    if is_dill(pickler, child=True):
        if id(f) in pickler._postproc:
            # Already seen. Add to its postprocessing.
            postproc = pickler._postproc[id(f)]
        else:
            # Haven't seen it. Add to the highest possible object and set its
            # value as late as possible to prevent cycle.
            postproc = next(iter(pickler._postproc.values()), None)
        if postproc is not None:
            logger.trace(pickler, "Ce2: %s", obj)
            # _CELL_REF is defined in _shims.py to support older versions of
            # dill. When breaking changes are made to dill, (_CELL_REF,) can
            # be replaced by ()
            pickler.save_reduce(_create_cell, (_CELL_REF,), obj=obj)
            # the real contents are set after the cycle is fully pickled
            postproc.append((_shims._setattr, (obj, 'cell_contents', f)))
            logger.trace(pickler, "# Ce2")
            return
    logger.trace(pickler, "Ce1: %s", obj)
    pickler.save_reduce(_create_cell, (f,), obj=obj)
    logger.trace(pickler, "# Ce1")
    return
if MAPPING_PROXY_TRICK:
    @register(DictProxyType)
    def save_dictproxy(pickler, obj):
        """Pickle a mappingproxy by recovering its true underlying mapping."""
        logger.trace(pickler, "Mp: %s", _repr_dict(obj)) # obj
        underlying = obj | _dictproxy_helper_instance
        pickler.save_reduce(DictProxyType, (underlying,), obj=obj)
        logger.trace(pickler, "# Mp")
else:
    @register(DictProxyType)
    def save_dictproxy(pickler, obj):
        """Pickle a mappingproxy from a shallow copy of its contents."""
        logger.trace(pickler, "Mp: %s", _repr_dict(obj)) # obj
        pickler.save_reduce(DictProxyType, (obj.copy(),), obj=obj)
        logger.trace(pickler, "# Mp")
@register(SliceType)
def save_slice(pickler, obj):
    """Pickle a slice from its start/stop/step triple."""
    logger.trace(pickler, "Sl: %s", obj)
    bounds = (obj.start, obj.stop, obj.step)
    pickler.save_reduce(slice, bounds, obj=obj)
    logger.trace(pickler, "# Sl")
@register(XRangeType)
@register(EllipsisType)
@register(NotImplementedType)
def save_singleton(pickler, obj):
    """Pickle singletons (range/Ellipsis/NotImplemented) via eval(repr)."""
    logger.trace(pickler, "Si: %s", obj)
    pickler.save_reduce(_eval_repr, (obj.__repr__(),), obj=obj)
    logger.trace(pickler, "# Si")
def _proxy_helper(obj): # a dead proxy returns a reference to None
    """get memory address of proxy's reference object"""
    _repr = repr(obj)
    try: _str = str(obj)
    except ReferenceError: # it's a dead proxy
        return id(None)
    if _str == _repr: return id(obj) # it's a repr
    try: # either way, it's a proxy from here
        # parse the hex address out of "<... at 0x...>"
        address = int(_str.rstrip('>').split(' at ')[-1], base=16)
    except ValueError: # special case: proxy of a 'type'
        if not IS_PYPY:
            address = int(_repr.rstrip('>').split(' at ')[-1], base=16)
        else:
            # PyPy reprs carry no address: scan live objects for a str match
            objects = iter(gc.get_objects())
            for _obj in objects:
                if repr(_obj) == _str: return id(_obj)
            # all bad below... nothing found so throw ReferenceError
            msg = "Cannot reference object for proxy at '%s'" % id(obj)
            raise ReferenceError(msg)
    return address
1654def _locate_object(address, module=None):
1655 """get object located at the given memory address (inverse of id(obj))"""
1656 special = [None, True, False] #XXX: more...?
1657 for obj in special:
1658 if address == id(obj): return obj
1659 if module:
1660 objects = iter(module.__dict__.values())
1661 else: objects = iter(gc.get_objects())
1662 for obj in objects:
1663 if address == id(obj): return obj
1664 # all bad below... nothing found so throw ReferenceError or TypeError
1665 try: address = hex(address)
1666 except TypeError:
1667 raise TypeError("'%s' is not a valid memory address" % str(address))
1668 raise ReferenceError("Cannot reference object at '%s'" % address)
@register(ReferenceType)
def save_weakref(pickler, obj):
    """Pickle a weakref by reducing to _create_weakref on its referent."""
    referent = obj()
    logger.trace(pickler, "R1: %s", obj)
    #refobj = ctypes.pythonapi.PyWeakref_GetObject(obj) # dead returns "None"
    pickler.save_reduce(_create_weakref, (referent,), obj=obj)
    logger.trace(pickler, "# R1")
@register(ProxyType)
@register(CallableProxyType)
def save_weakproxy(pickler, obj):
    """Pickle a weak proxy by locating its referent via memory address."""
    # Must do string substitution here and use %r to avoid ReferenceError.
    logger.trace(pickler, "R2: %r" % obj)
    referent = _locate_object(_proxy_helper(obj))
    pickler.save_reduce(_create_weakproxy, (referent, callable(obj)), obj=obj)
    logger.trace(pickler, "# R2")
1689def _is_builtin_module(module):
1690 if not hasattr(module, "__file__"): return True
1691 if module.__file__ is None: return False
1692 # If a module file name starts with prefix, it should be a builtin
1693 # module, so should always be pickled as a reference.
1694 names = ["base_prefix", "base_exec_prefix", "exec_prefix", "prefix", "real_prefix"]
1695 rp = os.path.realpath
1696 # See https://github.com/uqfoundation/dill/issues/566
1697 return (
1698 any(
1699 module.__file__.startswith(getattr(sys, name))
1700 or rp(module.__file__).startswith(rp(getattr(sys, name)))
1701 for name in names
1702 if hasattr(sys, name)
1703 )
1704 or module.__file__.endswith(EXTENSION_SUFFIXES)
1705 or 'site-packages' in module.__file__
1706 )
1708def _is_imported_module(module):
1709 return getattr(module, '__loader__', None) is not None or module in sys.modules.values()
@register(ModuleType)
def save_module(pickler, obj):
    """Pickle a module: builtin/installed modules go by import reference;
    the session main module and runtime-created modules are saved with a
    filtered copy of their __dict__ as state."""
    if False: #_use_diff:
        # dead branch kept for the (disabled) diff-based session pickling
        if obj.__name__.split('.', 1)[0] != "dill":
            try:
                changed = diff.whats_changed(obj, seen=pickler._diff_cache)[0]
            except RuntimeError: # not memorised module, probably part of dill
                pass
            else:
                logger.trace(pickler, "M2: %s with diff", obj)
                logger.info("Diff: %s", changed.keys())
                pickler.save_reduce(_import_module, (obj.__name__,), obj=obj,
                                    state=changed)
                logger.trace(pickler, "# M2")
                return

        logger.trace(pickler, "M1: %s", obj)
        pickler.save_reduce(_import_module, (obj.__name__,), obj=obj)
        logger.trace(pickler, "# M1")
    else:
        builtin_mod = _is_builtin_module(obj)
        is_session_main = is_dill(pickler, child=True) and obj is pickler._main
        if (obj.__name__ not in ("builtins", "dill", "dill._dill") and not builtin_mod
                or is_session_main):
            logger.trace(pickler, "M1: %s", obj)
            # Hack for handling module-type objects in load_module().
            mod_name = obj.__name__ if _is_imported_module(obj) else '__runtime__.%s' % obj.__name__
            # Second references are saved as __builtin__.__main__ in save_module_dict().
            main_dict = obj.__dict__.copy()
            for item in ('__builtins__', '__loader__'):
                main_dict.pop(item, None)
            for item in IPYTHON_SINGLETONS: #pragma: no cover
                if getattr(main_dict.get(item), '__module__', '').startswith('IPython'):
                    del main_dict[item]
            pickler.save_reduce(_import_module, (mod_name,), obj=obj, state=main_dict)
            logger.trace(pickler, "# M1")
        elif obj.__name__ == "dill._dill":
            logger.trace(pickler, "M2: %s", obj)
            pickler.save_global(obj, name="_dill")
            logger.trace(pickler, "# M2")
        else:
            logger.trace(pickler, "M2: %s", obj)
            pickler.save_reduce(_import_module, (obj.__name__,), obj=obj)
            logger.trace(pickler, "# M2")
        return
1757# The following function is based on '_extract_class_dict' from 'cloudpickle'
1758# Copyright (c) 2012, Regents of the University of California.
1759# Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
1760# License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE
1761def _get_typedict_type(cls, clsdict, attrs, postproc_list):
1762 """Retrieve a copy of the dict of a class without the inherited methods"""
1763 if len(cls.__bases__) == 1:
1764 inherited_dict = cls.__bases__[0].__dict__
1765 else:
1766 inherited_dict = {}
1767 for base in reversed(cls.__bases__):
1768 inherited_dict.update(base.__dict__)
1769 to_remove = []
1770 for name, value in dict.items(clsdict):
1771 try:
1772 base_value = inherited_dict[name]
1773 if value is base_value and hasattr(value, '__qualname__'):
1774 to_remove.append(name)
1775 except KeyError:
1776 pass
1777 for name in to_remove:
1778 dict.pop(clsdict, name)
1780 if issubclass(type(cls), type):
1781 clsdict.pop('__dict__', None)
1782 clsdict.pop('__weakref__', None)
1783 # clsdict.pop('__prepare__', None)
1784 return clsdict, attrs
1786def _get_typedict_abc(obj, _dict, attrs, postproc_list):
1787 if hasattr(abc, '_get_dump'):
1788 (registry, _, _, _) = abc._get_dump(obj)
1789 register = obj.register
1790 postproc_list.extend((register, (reg(),)) for reg in registry)
1791 elif hasattr(obj, '_abc_registry'):
1792 registry = obj._abc_registry
1793 register = obj.register
1794 postproc_list.extend((register, (reg,)) for reg in registry)
1795 else:
1796 raise PicklingError("Cannot find registry of ABC %s", obj)
1798 if '_abc_registry' in _dict:
1799 _dict.pop('_abc_registry', None)
1800 _dict.pop('_abc_cache', None)
1801 _dict.pop('_abc_negative_cache', None)
1802 # _dict.pop('_abc_negative_cache_version', None)
1803 else:
1804 _dict.pop('_abc_impl', None)
1805 return _dict, attrs
@register(TypeType)
def save_type(pickler, obj, postproc_list=None):
    """Reducer for type objects (classes and metaclasses).

    Dispatches to several strategies: known stdlib types by name (T1),
    namedtuple classes (T6), singleton-backed types such as NoneType (T7),
    full reconstruction from the class dict (T2), or a plain global
    reference (T4).
    """
    if obj in _typemap:
        # T1: a known type; save by its registered name.
        logger.trace(pickler, "T1: %s", obj)
        # if obj in _incedental_types:
        #     warnings.warn('Type %r may only exist on this implementation of Python and cannot be unpickled in other implementations.' % (obj,), PicklingWarning)
        pickler.save_reduce(_load_type, (_typemap[obj],), obj=obj)
        logger.trace(pickler, "# T1")
    elif obj.__bases__ == (tuple,) and all([hasattr(obj, attr) for attr in ('_fields','_asdict','_make','_replace')]):
        # special case: namedtuples
        logger.trace(pickler, "T6: %s", obj)

        obj_name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
        if obj.__name__ != obj_name:
            # Restore a nested __qualname__ after reconstruction.
            if postproc_list is None:
                postproc_list = []
            postproc_list.append((setattr, (obj, '__qualname__', obj_name)))

        if not obj._field_defaults:
            _save_with_postproc(pickler, (_create_namedtuple, (obj.__name__, obj._fields, obj.__module__)), obj=obj, postproc_list=postproc_list)
        else:
            defaults = [obj._field_defaults[field] for field in obj._fields if field in obj._field_defaults]
            _save_with_postproc(pickler, (_create_namedtuple, (obj.__name__, obj._fields, obj.__module__, defaults)), obj=obj, postproc_list=postproc_list)
        logger.trace(pickler, "# T6")
        return

    # special cases: NoneType, NotImplementedType, EllipsisType, EnumMeta, etc
    elif obj is type(None):
        logger.trace(pickler, "T7: %s", obj)
        #XXX: pickler.save_reduce(type, (None,), obj=obj)
        pickler.write(GLOBAL + b'__builtin__\nNoneType\n')
        logger.trace(pickler, "# T7")
    elif obj is NotImplementedType:
        logger.trace(pickler, "T7: %s", obj)
        pickler.save_reduce(type, (NotImplemented,), obj=obj)
        logger.trace(pickler, "# T7")
    elif obj is EllipsisType:
        logger.trace(pickler, "T7: %s", obj)
        pickler.save_reduce(type, (Ellipsis,), obj=obj)
        logger.trace(pickler, "# T7")
    elif obj is EnumMeta:
        logger.trace(pickler, "T7: %s", obj)
        pickler.write(GLOBAL + b'enum\nEnumMeta\n')
        logger.trace(pickler, "# T7")
    elif obj is ExceptHookArgsType: #NOTE: must be after NoneType for pypy
        logger.trace(pickler, "T7: %s", obj)
        pickler.write(GLOBAL + b'threading\nExceptHookArgs\n')
        logger.trace(pickler, "# T7")

    else:
        _byref = getattr(pickler, '_byref', None)
        obj_recursive = id(obj) in getattr(pickler, '_postproc', ())
        incorrectly_named = not _locate_function(obj, pickler)
        if not _byref and not obj_recursive and incorrectly_named: # not a function, but the name was held over
            # T2: reconstruct the class from its (filtered) dict.
            if postproc_list is None:
                postproc_list = []

            # thanks to Tom Stepleton pointing out pickler._session unneeded
            logger.trace(pickler, "T2: %s", obj)
            _dict, attrs = _get_typedict_type(obj, obj.__dict__.copy(), None, postproc_list) # copy dict proxy to a dict

            #print (_dict)
            #print ("%s\n%s" % (type(obj), obj.__name__))
            #print ("%s\n%s" % (obj.__bases__, obj.__dict__))
            slots = _dict.get('__slots__', ())
            if type(slots) == str:
                # __slots__ accepts a single string
                slots = (slots,)

            # Slot descriptors are recreated by the type constructor itself.
            for name in slots:
                _dict.pop(name, None)

            if isinstance(obj, abc.ABCMeta):
                logger.trace(pickler, "ABC: %s", obj)
                _dict, attrs = _get_typedict_abc(obj, _dict, attrs, postproc_list)
                logger.trace(pickler, "# ABC")

            qualname = getattr(obj, '__qualname__', None)
            if attrs is not None:
                for k, v in attrs.items():
                    postproc_list.append((setattr, (obj, k, v)))
            # TODO: Consider using the state argument to save_reduce?
            if qualname is not None:
                postproc_list.append((setattr, (obj, '__qualname__', qualname)))

            if not hasattr(obj, '__orig_bases__'):
                _save_with_postproc(pickler, (_create_type, (
                    type(obj), obj.__name__, obj.__bases__, _dict
                )), obj=obj, postproc_list=postproc_list)
            else:
                # This case will always work, but might be overkill.
                _metadict = {
                    'metaclass': type(obj)
                }

                if _dict:
                    _dict_update = PartialType(_setitems, source=_dict)
                else:
                    _dict_update = None

                _save_with_postproc(pickler, (new_class, (
                    obj.__name__, obj.__orig_bases__, _metadict, _dict_update
                )), obj=obj, postproc_list=postproc_list)
            logger.trace(pickler, "# T2")
        else:
            # T4: save by global reference, warning on likely failures.
            obj_name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
            logger.trace(pickler, "T4: %s", obj)
            if incorrectly_named:
                warnings.warn(
                    "Cannot locate reference to %r." % (obj,),
                    PicklingWarning,
                    stacklevel=3,
                )
            if obj_recursive:
                warnings.warn(
                    "Cannot pickle %r: %s.%s has recursive self-references that "
                    "trigger a RecursionError." % (obj, obj.__module__, obj_name),
                    PicklingWarning,
                    stacklevel=3,
                )
            #print (obj.__dict__)
            #print ("%s\n%s" % (type(obj), obj.__name__))
            #print ("%s\n%s" % (obj.__bases__, obj.__dict__))
            StockPickler.save_global(pickler, obj, name=obj_name)
            logger.trace(pickler, "# T4")
    return
@register(property)
@register(abc.abstractproperty)
def save_property(pickler, obj):
    """Pickle a property by rebuilding it from its accessors and docstring."""
    logger.trace(pickler, "Pr: %s", obj)
    accessors = (obj.fget, obj.fset, obj.fdel, obj.__doc__)
    pickler.save_reduce(type(obj), accessors, obj=obj)
    logger.trace(pickler, "# Pr")
@register(staticmethod)
@register(classmethod)
@register(abc.abstractstaticmethod)
@register(abc.abstractclassmethod)
def save_classmethod(pickler, obj):
    """Pickle a staticmethod/classmethod wrapper by saving the wrapped
    function and re-wrapping it with the same descriptor type on load.

    NOTE: the wrapper's own __dict__ is intentionally not saved; see the
    project history for a sketch of state handling that was never enabled.
    """
    logger.trace(pickler, "Cm: %s", obj)
    wrapped = obj.__func__
    pickler.save_reduce(type(obj), (wrapped,), obj=obj)
    logger.trace(pickler, "# Cm")
@register(FunctionType)
def save_function(pickler, obj):
    """Reducer for Python functions.

    Functions that can be located by module and qualified name are saved by
    global reference (F2).  Otherwise the function is rebuilt from its code
    object, globals, defaults and closure (F1); a PyPy builtin whose
    ``__func__`` can be found on its module is saved via that attribute (F3).
    """
    if not _locate_function(obj, pickler):
        if type(obj.__code__) is not CodeType:
            # Some PyPy builtin functions have no module name, and thus are not
            # able to be located
            module_name = getattr(obj, '__module__', None)
            if module_name is None:
                module_name = __builtin__.__name__
            module = _import_module(module_name, safe=True)
            _pypy_builtin = False
            try:
                found, _ = _getattribute(module, obj.__qualname__)
                if getattr(found, '__func__', None) is obj:
                    _pypy_builtin = True
            except AttributeError:
                pass

            if _pypy_builtin:
                logger.trace(pickler, "F3: %s", obj)
                pickler.save_reduce(getattr, (found, '__func__'), obj=obj)
                logger.trace(pickler, "# F3")
                return

        logger.trace(pickler, "F1: %s", obj)
        _recurse = getattr(pickler, '_recurse', None)
        _postproc = getattr(pickler, '_postproc', None)
        _main_modified = getattr(pickler, '_main_modified', None)
        _original_main = getattr(pickler, '_original_main', __builtin__)#'None'
        postproc_list = []
        if _recurse:
            # recurse to get all globals referred to by obj
            from .detect import globalvars
            globs_copy = globalvars(obj, recurse=True, builtin=True)

            # Add the name of the module to the globs dictionary to prevent
            # the duplication of the dictionary. Pickle the unpopulated
            # globals dictionary and set the remaining items after the function
            # is created to correctly handle recursion.
            globs = {'__name__': obj.__module__}
        else:
            globs_copy = obj.__globals__

            # If the globals is the __dict__ from the module being saved as a
            # session, substitute it by the dictionary being actually saved.
            if _main_modified and globs_copy is _original_main.__dict__:
                globs_copy = getattr(pickler, '_main', _original_main).__dict__
                globs = globs_copy
            # If the globals is a module __dict__, do not save it in the pickle.
            elif globs_copy is not None and obj.__module__ is not None and \
                    getattr(_import_module(obj.__module__, True), '__dict__', None) is globs_copy:
                globs = globs_copy
            else:
                globs = {'__name__': obj.__module__}

        if globs_copy is not None and globs is not globs_copy:
            # In the case that the globals are copied, we need to ensure that
            # the globals dictionary is updated when all objects in the
            # dictionary are already created.
            glob_ids = {id(g) for g in globs_copy.values()}
            for stack_element in _postproc:
                if stack_element in glob_ids:
                    _postproc[stack_element].append((_setitems, (globs, globs_copy)))
                    break
            else:
                postproc_list.append((_setitems, (globs, globs_copy)))

        closure = obj.__closure__
        # Collect attributes that must be restored on the rebuilt function.
        state_dict = {}
        for fattrname in ('__doc__', '__kwdefaults__', '__annotations__'):
            fattr = getattr(obj, fattrname, None)
            if fattr is not None:
                state_dict[fattrname] = fattr
        if obj.__qualname__ != obj.__name__:
            state_dict['__qualname__'] = obj.__qualname__
        if '__name__' not in globs or obj.__module__ != globs['__name__']:
            state_dict['__module__'] = obj.__module__

        state = obj.__dict__
        if type(state) is not dict:
            # Non-dict __dict__ must be restored via setattr, not update().
            state_dict['__dict__'] = state
            state = None
        if state_dict:
            state = state, state_dict

        _save_with_postproc(pickler, (_create_function, (
                obj.__code__, globs, obj.__name__, obj.__defaults__,
                closure
        ), state), obj=obj, postproc_list=postproc_list)

        # Lift closure cell update to earliest function (#458)
        if _postproc:
            topmost_postproc = next(iter(_postproc.values()), None)
            if closure and topmost_postproc:
                for cell in closure:
                    possible_postproc = (setattr, (cell, 'cell_contents', obj))
                    try:
                        topmost_postproc.remove(possible_postproc)
                    except ValueError:
                        continue

                    # Change the value of the cell
                    pickler.save_reduce(*possible_postproc)
                    # pop None created by calling preprocessing step off stack
                    pickler.write(POP)

        logger.trace(pickler, "# F1")
    else:
        logger.trace(pickler, "F2: %s", obj)
        name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
        StockPickler.save_global(pickler, obj, name=name)
        logger.trace(pickler, "# F2")
    return
if HAS_CTYPES and hasattr(ctypes, 'pythonapi'):
    # Bind the C-level PyCapsule API through ctypes so capsules can be
    # decomposed into (pointer, name, context, destructor) and rebuilt.
    _PyCapsule_New = ctypes.pythonapi.PyCapsule_New
    _PyCapsule_New.argtypes = (ctypes.c_void_p, ctypes.c_char_p, ctypes.c_void_p)
    _PyCapsule_New.restype = ctypes.py_object
    _PyCapsule_GetPointer = ctypes.pythonapi.PyCapsule_GetPointer
    _PyCapsule_GetPointer.argtypes = (ctypes.py_object, ctypes.c_char_p)
    _PyCapsule_GetPointer.restype = ctypes.c_void_p
    _PyCapsule_GetDestructor = ctypes.pythonapi.PyCapsule_GetDestructor
    _PyCapsule_GetDestructor.argtypes = (ctypes.py_object,)
    _PyCapsule_GetDestructor.restype = ctypes.c_void_p
    _PyCapsule_GetContext = ctypes.pythonapi.PyCapsule_GetContext
    _PyCapsule_GetContext.argtypes = (ctypes.py_object,)
    _PyCapsule_GetContext.restype = ctypes.c_void_p
    _PyCapsule_GetName = ctypes.pythonapi.PyCapsule_GetName
    _PyCapsule_GetName.argtypes = (ctypes.py_object,)
    _PyCapsule_GetName.restype = ctypes.c_char_p
    _PyCapsule_IsValid = ctypes.pythonapi.PyCapsule_IsValid
    _PyCapsule_IsValid.argtypes = (ctypes.py_object, ctypes.c_char_p)
    _PyCapsule_IsValid.restype = ctypes.c_bool
    _PyCapsule_SetContext = ctypes.pythonapi.PyCapsule_SetContext
    _PyCapsule_SetContext.argtypes = (ctypes.py_object, ctypes.c_void_p)
    _PyCapsule_SetDestructor = ctypes.pythonapi.PyCapsule_SetDestructor
    _PyCapsule_SetDestructor.argtypes = (ctypes.py_object, ctypes.c_void_p)
    _PyCapsule_SetName = ctypes.pythonapi.PyCapsule_SetName
    _PyCapsule_SetName.argtypes = (ctypes.py_object, ctypes.c_char_p)
    _PyCapsule_SetPointer = ctypes.pythonapi.PyCapsule_SetPointer
    _PyCapsule_SetPointer.argtypes = (ctypes.py_object, ctypes.c_void_p)
    #from _socket import CAPI as _testcapsule
    # Create a throwaway capsule purely to obtain the (otherwise unexposed)
    # PyCapsule type object.
    _testcapsule_name = b'dill._dill._testcapsule'
    _testcapsule = _PyCapsule_New(
        ctypes.cast(_PyCapsule_New, ctypes.c_void_p),
        ctypes.c_char_p(_testcapsule_name),
        None
    )
    PyCapsuleType = type(_testcapsule)
    @register(PyCapsuleType)
    def save_capsule(pickler, obj):
        """Pickle a PyCapsule by its raw pointer, name, context and destructor.

        NOTE: only the raw addresses are captured -- the pointed-to C data is
        not serialized, so unpickling elsewhere may be unsafe (see the
        commented-out warning below).
        """
        logger.trace(pickler, "Cap: %s", obj)
        name = _PyCapsule_GetName(obj)
        #warnings.warn('Pickling a PyCapsule (%s) does not pickle any C data structures and could cause segmentation faults or other memory errors when unpickling.' % (name,), PicklingWarning)
        pointer = _PyCapsule_GetPointer(obj, name)
        context = _PyCapsule_GetContext(obj)
        destructor = _PyCapsule_GetDestructor(obj)
        pickler.save_reduce(_create_capsule, (pointer, name, context, destructor), obj=obj)
        logger.trace(pickler, "# Cap")
    _incedental_reverse_typemap['PyCapsuleType'] = PyCapsuleType
    _reverse_typemap['PyCapsuleType'] = PyCapsuleType
    _incedental_types.add(PyCapsuleType)
else:
    # No ctypes/pythonapi (e.g. restricted builds): capsules unsupported.
    _testcapsule = None
@register(ContextType)
def save_context(pickler, obj):
    """Pickle a context object by reducing it to its (var, value) pairs."""
    logger.trace(pickler, "Cx: %s", obj)
    entries = tuple(obj.items())
    pickler.save_reduce(ContextType, entries, obj=obj)
    logger.trace(pickler, "# Cx")
2133#############################
2134# A quick fix for issue #500
2135# This should be removed when a better solution is found.
# Reducers that pickle the private 'dataclasses' sentinels by reference to
# their module-level singletons (quick fix for issue #500).  Each is only
# registered if the sentinel exists in this Python version's dataclasses.
if hasattr(dataclasses, "_HAS_DEFAULT_FACTORY_CLASS"):
    @register(dataclasses._HAS_DEFAULT_FACTORY_CLASS)
    def save_dataclasses_HAS_DEFAULT_FACTORY_CLASS(pickler, obj):
        # Emit a GLOBAL reference to dataclasses._HAS_DEFAULT_FACTORY.
        logger.trace(pickler, "DcHDF: %s", obj)
        pickler.write(GLOBAL + b"dataclasses\n_HAS_DEFAULT_FACTORY\n")
        logger.trace(pickler, "# DcHDF")

if hasattr(dataclasses, "MISSING"):
    @register(type(dataclasses.MISSING))
    def save_dataclasses_MISSING_TYPE(pickler, obj):
        # Emit a GLOBAL reference to dataclasses.MISSING.
        logger.trace(pickler, "DcM: %s", obj)
        pickler.write(GLOBAL + b"dataclasses\nMISSING\n")
        logger.trace(pickler, "# DcM")

if hasattr(dataclasses, "KW_ONLY"):
    @register(type(dataclasses.KW_ONLY))
    def save_dataclasses_KW_ONLY_TYPE(pickler, obj):
        # Emit a GLOBAL reference to dataclasses.KW_ONLY.
        logger.trace(pickler, "DcKWO: %s", obj)
        pickler.write(GLOBAL + b"dataclasses\nKW_ONLY\n")
        logger.trace(pickler, "# DcKWO")

if hasattr(dataclasses, "_FIELD_BASE"):
    @register(dataclasses._FIELD_BASE)
    def save_dataclasses_FIELD_BASE(pickler, obj):
        # _FIELD_BASE instances carry their canonical name in obj.name;
        # emit a GLOBAL reference to that name in the dataclasses module.
        logger.trace(pickler, "DcFB: %s", obj)
        pickler.write(GLOBAL + b"dataclasses\n" + obj.name.encode() + b"\n")
        logger.trace(pickler, "# DcFB")
2165#############################
2167# quick sanity checking
def pickles(obj,exact=False,safe=False,**kwds):
    """
    Quick check if object pickles with dill.

    If *exact=True* then an equality test is done to check if the reconstructed
    object matches the original object.

    If *safe=True* then any exception will raised in copy signal that the
    object is not picklable, otherwise only pickling errors will be trapped.

    Additional keyword arguments are as :func:`dumps` and :func:`loads`.
    """
    if safe: exceptions = (Exception,) # RuntimeError, ValueError
    else:
        exceptions = (TypeError, AssertionError, NotImplementedError, PicklingError, UnpicklingError)
    try:
        pik = copy(obj, **kwds)
        #FIXME: should check types match first, then check content if "exact"
        try:
            #FIXME: should be "(pik == obj).all()" for numpy comparison, though that'll fail if shapes differ
            result = bool(pik.all() == obj.all())
        except (AttributeError, TypeError):
            # FIX: suppress comparison warnings without permanently mutating
            # the global warnings filter list.  The previous
            # filterwarnings('ignore') / del warnings.filters[0] pair leaked
            # the added filter whenever the comparison raised.
            with warnings.catch_warnings():
                warnings.simplefilter('ignore') #FIXME: be specific
                result = pik == obj
        if hasattr(result, 'toarray'): # for unusual types like sparse matrix
            result = result.toarray().all()
        if result: return True
        if not exact:
            # fall back to a type match when contents differ or can't compare
            result = type(pik) == type(obj)
            if result: return result
            # class instances might have been dumped with byref=False
            return repr(type(pik)) == repr(type(obj)) #XXX: InstanceType?
        return False
    except exceptions:
        return False
def check(obj, *args, **kwds):
    """
    Check pickling of an object across another process.

    *python* is the path to the python interpreter (defaults to sys.executable)

    Set *verbose=True* to print the unpickled object in the other process.

    Additional keyword arguments are as :func:`dumps` and :func:`loads`.
    """
    # == undocumented ==
    # python -- the string path or executable name of the selected python
    # verbose -- if True, be verbose about printing warning messages
    # all other args and kwds are passed to dill.dumps #FIXME: ignore on load
    verbose = kwds.pop('verbose', False)
    python = kwds.pop('python', None)
    if python is None:
        import sys
        python = sys.executable
    # type check (NOTE: result intentionally unused; a non-str interpreter
    # path will simply fail below in subprocess)
    isinstance(python, str)
    import subprocess
    fail = True
    try:
        _obj = dumps(obj, *args, **kwds)
        fail = False
    finally:
        if fail and verbose:
            print("DUMP FAILED")
    # FIX: build the argument list directly instead of splitting a command
    # string, so interpreter paths containing spaces work.  For space-free
    # paths this produces exactly the same argv as the old split(None,2).
    #FIXME: the 'ignore' keyword is still not forwarded to dill.loads in the
    # child process.
    cmd = [python, "-c", "import dill; print(dill.loads(%s))" % repr(_obj)]
    msg = "SUCCESS" if not subprocess.call(cmd) else "LOAD FAILED"
    if verbose:
        print(msg)
    return
2246# use to protect against missing attributes
def is_dill(pickler, child=None):
    "check the dill-ness of your pickler"
    # With child checking enabled (and a class that supports mro), test for
    # actual subclassing; otherwise fall back to the module-name heuristic.
    use_mro = child is not False and hasattr(pickler.__class__, 'mro')
    if use_mro:
        return Pickler in pickler.__class__.mro()
    return 'dill' in pickler.__module__
def _extend():
    """extend pickle with all of dill's registered types"""
    # need to have pickle not choke on _main_module? use is_dill(pickler)
    for t, func in Pickler.dispatch.items():
        try:
            StockPickler.dispatch[t] = func
        except Exception: #TypeError, PicklingError, UnpicklingError
            # FIX: the previous code called logger.trace(pickler, ...) here,
            # but no 'pickler' name exists in this scope, so a registration
            # failure raised NameError instead of being logged.  Use plain
            # logging (same lazy %-args style used elsewhere in this file).
            logger.info("skip: %s", t)
    return
# Clean up: remove the diff-mode plumbing from the module namespace; the
# feature is disabled (see the ``if False: #_use_diff`` branch in
# save_module) and these names are not part of the public API.
del diff, _use_diff, use_diff
2265# EOF