Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.10/site-packages/dill/_dill.py: 41%

Shortcuts on this page

r m x   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1398 statements  

1# -*- coding: utf-8 -*- 

2# 

3# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) 

4# Copyright (c) 2008-2015 California Institute of Technology. 

5# Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. 

6# License: 3-clause BSD. The full license text is available at: 

7# - https://github.com/uqfoundation/dill/blob/master/LICENSE 

8""" 

9dill: a utility for serialization of python objects 

10 

11The primary functions in `dill` are :func:`dump` and 

12:func:`dumps` for serialization ("pickling") to a 

13file or to a string, respectively, and :func:`load` 

14and :func:`loads` for deserialization ("unpickling"), 

15similarly, from a file or from a string. Other notable 

16functions are :func:`~dill.dump_module` and 

17:func:`~dill.load_module`, which are used to save and 

18restore module objects, including an interpreter session. 

19 

20Based on code written by Oren Tirosh and Armin Ronacher. 

21Extended to a (near) full set of the builtin types (in types module), 

22and coded to the pickle interface, by <mmckerns@caltech.edu>. 

23Initial port to python3 by Jonathan Dobson, continued by mmckerns. 

24Tested against "all" python types (Std. Lib. CH 1-15 @ 2.7) by mmckerns. 

25Tested against CH16+ Std. Lib. ... TBD. 

26""" 

27 

28from __future__ import annotations 

29 

30__all__ = [ 

31 'dump','dumps','load','loads','copy', 

32 'Pickler','Unpickler','register','pickle','pickles','check', 

33 'DEFAULT_PROTOCOL','HIGHEST_PROTOCOL','HANDLE_FMODE','CONTENTS_FMODE','FILE_FMODE', 

34 'PickleError','PickleWarning','PicklingError','PicklingWarning','UnpicklingError', 

35 'UnpicklingWarning', 

36] 

37 

38__module__ = 'dill' 

39 

40import warnings 

41from .logger import adapter as logger 

42from .logger import trace as _trace 

43log = logger # backward compatibility (see issue #582) 

44 

45import os 

46import sys 

47diff = None 

48_use_diff = False 

49OLD38 = (sys.hexversion < 0x3080000) 

50OLD39 = (sys.hexversion < 0x3090000) 

51OLD310 = (sys.hexversion < 0x30a0000) 

52OLD312a7 = (sys.hexversion < 0x30c00a7) 

53#XXX: get types from .objtypes ? 

54import builtins as __builtin__ 

55from pickle import _Pickler as StockPickler, Unpickler as StockUnpickler 

56from pickle import GLOBAL, POP 

57from _contextvars import Context as ContextType 

58from _thread import LockType 

59from _thread import RLock as RLockType 

60try: 

61 from _thread import _ExceptHookArgs as ExceptHookArgsType 

62except ImportError: 

63 ExceptHookArgsType = None 

64try: 

65 from _thread import _ThreadHandle as ThreadHandleType 

66except ImportError: 

67 ThreadHandleType = None 

68#from io import IOBase 

69from types import CodeType, FunctionType, MethodType, GeneratorType, \ 

70 TracebackType, FrameType, ModuleType, BuiltinMethodType 

71BufferType = memoryview #XXX: unregistered 

72ClassType = type # no 'old-style' classes 

73EllipsisType = type(Ellipsis) 

74#FileType = IOBase 

75NotImplementedType = type(NotImplemented) 

76SliceType = slice 

77TypeType = type # 'new-style' classes #XXX: unregistered 

78XRangeType = range 

79from types import MappingProxyType as DictProxyType, new_class 

80from pickle import DEFAULT_PROTOCOL, HIGHEST_PROTOCOL, PickleError, PicklingError, UnpicklingError 

81import __main__ as _main_module 

82import marshal 

83import gc 

84# import zlib 

85import abc 

86import dataclasses 

87from weakref import ReferenceType, ProxyType, CallableProxyType 

88from collections import OrderedDict 

89from enum import Enum, EnumMeta 

90from functools import partial 

91from operator import itemgetter, attrgetter 

92GENERATOR_FAIL = False 

93import importlib.machinery 

94EXTENSION_SUFFIXES = tuple(importlib.machinery.EXTENSION_SUFFIXES) 

95try: 

96 import ctypes 

97 HAS_CTYPES = True 

98 # if using `pypy`, pythonapi is not found 

99 IS_PYPY = not hasattr(ctypes, 'pythonapi') 

100except ImportError: 

101 HAS_CTYPES = False 

102 IS_PYPY = False 

103NumpyUfuncType = None 

104NumpyDType = None 

105NumpyArrayType = None 

106try: 

107 if not importlib.machinery.PathFinder().find_spec('numpy'): 

108 raise ImportError("No module named 'numpy'") 

109 NumpyUfuncType = True 

110 NumpyDType = True 

111 NumpyArrayType = True 

112except ImportError: 

113 pass 

114def __hook__(): 

115 global NumpyArrayType, NumpyDType, NumpyUfuncType 

116 from numpy import ufunc as NumpyUfuncType 

117 from numpy import ndarray as NumpyArrayType 

118 from numpy import dtype as NumpyDType 

119 return True 

if NumpyArrayType: # then has numpy
    def ndarraysubclassinstance(obj_type):
        """True if obj_type is numpy.ndarray (or a subclass) that kept numpy's reduce methods."""
        mro_names = [(c.__module__, c.__name__) for c in obj_type.__mro__]
        if ('numpy', 'ndarray') not in mro_names:
            return False
        # anything below here is a numpy array (or subclass) instance
        __hook__() # import numpy (so the following works!!!)
        # verify that __reduce__ has not been overridden
        if (obj_type.__reduce_ex__ is not NumpyArrayType.__reduce_ex__
                or obj_type.__reduce__ is not NumpyArrayType.__reduce__):
            return False
        return True
    def numpyufunc(obj_type):
        """True if obj_type is numpy.ufunc or derives from it."""
        return any((c.__module__, c.__name__) == ('numpy', 'ufunc') for c in obj_type.__mro__)
    def numpydtype(obj_type):
        """True if obj_type is exactly the numpy dtype metatype."""
        if all((c.__module__, c.__name__) != ('numpy', 'dtype') for c in obj_type.__mro__):
            return False
        # anything below here is a numpy dtype
        __hook__() # import numpy (so the following works!!!)
        return obj_type is type(NumpyDType) # handles subclasses
else:
    # numpy is unavailable: every predicate is trivially False
    def ndarraysubclassinstance(obj): return False
    def numpyufunc(obj): return False
    def numpydtype(obj): return False

143 

144from types import GetSetDescriptorType, ClassMethodDescriptorType, \ 

145 WrapperDescriptorType, MethodDescriptorType, MemberDescriptorType, \ 

146 MethodWrapperType #XXX: unused 

147 

148# make sure to add these 'hand-built' types to _typemap 

149CellType = type((lambda x: lambda y: x)(0).__closure__[0]) 

150PartialType = type(partial(int, base=2)) 

151SuperType = type(super(Exception, TypeError())) 

152ItemGetterType = type(itemgetter(0)) 

153AttrGetterType = type(attrgetter('__repr__')) 

154 

155try: 

156 from functools import _lru_cache_wrapper as LRUCacheType 

157except ImportError: 

158 LRUCacheType = None 

159 

160if not isinstance(LRUCacheType, type): 

161 LRUCacheType = None 

162 

def get_file_type(*args, **kwargs):
    """Open os.devnull with the given mode/options and return the handle's type.

    An alternative ``open`` callable (e.g. the pure-python one from ``_pyio``)
    may be supplied via the ``open`` keyword.
    """
    opener = kwargs.pop("open", __builtin__.open)
    handle = opener(os.devnull, *args, **kwargs)
    try:
        return type(handle)
    finally:
        handle.close()

169 

170IS_PYODIDE = sys.platform == 'emscripten' 

171 

172FileType = get_file_type('rb', buffering=0) 

173TextWrapperType = get_file_type('r', buffering=-1) 

174BufferedRandomType = None if IS_PYODIDE else get_file_type('r+b', buffering=-1) 

175BufferedReaderType = get_file_type('rb', buffering=-1) 

176BufferedWriterType = get_file_type('wb', buffering=-1) 

177try: 

178 from _pyio import open as _open 

179 PyTextWrapperType = get_file_type('r', buffering=-1, open=_open) 

180 PyBufferedRandomType = None if IS_PYODIDE else get_file_type('r+b', buffering=-1, open=_open) 

181 PyBufferedReaderType = get_file_type('rb', buffering=-1, open=_open) 

182 PyBufferedWriterType = get_file_type('wb', buffering=-1, open=_open) 

183except ImportError: 

184 PyTextWrapperType = PyBufferedRandomType = PyBufferedReaderType = PyBufferedWriterType = None 

185from io import BytesIO as StringIO 

186InputType = OutputType = None 

187from socket import socket as SocketType 

188#FIXME: additionally calls ForkingPickler.register several times 

189from multiprocessing.reduction import _reduce_socket as reduce_socket 

190try: #pragma: no cover 

191 IS_IPYTHON = __IPYTHON__ # is True 

192 ExitType = None # IPython.core.autocall.ExitAutocall 

193 IPYTHON_SINGLETONS = ('exit', 'quit', 'get_ipython') 

194except NameError: 

195 IS_IPYTHON = False 

196 try: ExitType = type(exit) # apparently 'exit' can be removed 

197 except NameError: ExitType = None 

198 IPYTHON_SINGLETONS = () 

199 

200import inspect 

201import typing 

202 

203 

204### Shims for different versions of Python and dill 

class Sentinel(object):
    """
    A unique sentinel object that is pickled as a constant.

    The instance reduces to its bare name, so unpickling resolves it back to
    the module-level constant instead of building a new object; copying
    likewise returns the same instance.
    """
    def __init__(self, name, module_name=None):
        self.name = name
        if module_name is None:
            # Attribute the sentinel to the calling frame's module.
            self.__module__ = inspect.currentframe().f_back.f_globals['__name__']
        else:
            self.__module__ = module_name # pragma: no cover
    def __repr__(self):
        return '%s.%s' % (self.__module__, self.name) # pragma: no cover
    def __copy__(self):
        return self # pragma: no cover
    def __deepcopy__(self, memo):
        return self # pragma: no cover
    def __reduce__(self):
        # a bare string tells pickle to look the name up as a global
        return self.name
    def __reduce_ex__(self, protocol):
        return self.name

226 

227from . import _shims 

228from ._shims import Reduce, Getattr 

229 

230### File modes 

231#: Pickles the file handle, preserving mode. The position of the unpickled 

232#: object is as for a new file handle. 

233HANDLE_FMODE = 0 

234#: Pickles the file contents, creating a new file if on load the file does 

235#: not exist. The position = min(pickled position, EOF) and mode is chosen 

236#: as such that "best" preserves behavior of the original file. 

237CONTENTS_FMODE = 1 

238#: Pickles the entire file (handle and contents), preserving mode and position. 

239FILE_FMODE = 2 

240 

241### Shorthands (modified from python2.5/lib/pickle.py) 

def copy(obj, *args, **kwds):
    """
    Use pickling to 'copy' an object (i.e. `loads(dumps(obj))`).

    See :func:`dumps` and :func:`loads` for keyword arguments.
    """
    ignore = kwds.pop('ignore', Unpickler.settings['ignore'])
    pickled = dumps(obj, *args, **kwds)
    return loads(pickled, ignore=ignore)

250 

def dump(obj, file, protocol=None, byref=None, fmode=None, recurse=None, **kwds):#, strictio=None):
    """
    Pickle an object to a file.

    See :func:`dumps` for keyword arguments.
    """
    from .settings import settings
    if protocol is None:
        protocol = settings['protocol']
    else:
        protocol = int(protocol)
    # forward the dill-specific options alongside any stock pickler kwargs
    pickler_kwds = dict(kwds, byref=byref, fmode=fmode, recurse=recurse)
    Pickler(file, protocol, **pickler_kwds).dump(obj)
    return

263 

def dumps(obj, protocol=None, byref=None, fmode=None, recurse=None, **kwds):#, strictio=None):
    """
    Pickle an object to a string.

    *protocol* is the pickler protocol, as defined for Python *pickle*.

    If *byref=True*, then dill behaves a lot more like pickle as certain
    objects (like modules) are pickled by reference as opposed to attempting
    to pickle the object itself.

    If *recurse=True*, then objects referred to in the global dictionary
    are recursively traced and pickled, instead of the default behavior
    of attempting to store the entire global dictionary. This is needed for
    functions defined via *exec()*.

    *fmode* (:const:`HANDLE_FMODE`, :const:`CONTENTS_FMODE`,
    or :const:`FILE_FMODE`) indicates how file handles will be pickled.
    For example, when pickling a data file handle for transfer to a remote
    compute service, *FILE_FMODE* will include the file contents in the
    pickle and cursor position so that a remote method can operate
    transparently on an object with an open file handle.

    Default values for keyword arguments can be set in :mod:`dill.settings`.
    """
    buffer = StringIO()
    dump(obj, buffer, protocol, byref, fmode, recurse, **kwds)#, strictio)
    return buffer.getvalue()

291 

def load(file, ignore=None, **kwds):
    """
    Unpickle an object from a file.

    See :func:`loads` for keyword arguments.
    """
    unpickler = Unpickler(file, ignore=ignore, **kwds)
    return unpickler.load()

299 

def loads(str, ignore=None, **kwds):
    """
    Unpickle an object from a string.

    If *ignore=False* then objects whose class is defined in the module
    *__main__* are updated to reference the existing class in *__main__*,
    otherwise they are left to refer to the reconstructed type, which may
    be different.

    Default values for keyword arguments can be set in :mod:`dill.settings`.
    """
    # NOTE: the parameter is named 'str' (shadowing the builtin) for
    # backward compatibility with keyword callers.
    buffer = StringIO(str)
    return load(buffer, ignore, **kwds)

313 

314# def dumpzs(obj, protocol=None): 

315# """pickle an object to a compressed string""" 

316# return zlib.compress(dumps(obj, protocol)) 

317 

318# def loadzs(str): 

319# """unpickle an object from a compressed string""" 

320# return loads(zlib.decompress(str)) 

321 

322### End: Shorthands ### 

323 

324class MetaCatchingDict(dict): 

325 def get(self, key, default=None): 

326 try: 

327 return self[key] 

328 except KeyError: 

329 return default 

330 

331 def __missing__(self, key): 

332 if issubclass(key, type): 

333 return save_type 

334 else: 

335 raise KeyError() 

336 

class PickleWarning(Warning, PickleError):
    """A Warning that is also a PickleError."""

class PicklingWarning(PickleWarning, PicklingError):
    """A PickleWarning that is also a PicklingError."""

class UnpicklingWarning(PickleWarning, UnpicklingError):
    """A PickleWarning that is also an UnpicklingError."""

345 

346### Extend the Picklers 

class Pickler(StockPickler):
    """python's Pickler extended to interpreter sessions"""
    dispatch: typing.Dict[type, typing.Callable[[Pickler, typing.Any], None]] \
            = MetaCatchingDict(StockPickler.dispatch.copy())
    """The dispatch table, a dictionary of serializing functions used
    by Pickler to save objects of specific types.  Use :func:`pickle`
    or :func:`register` to associate types to custom functions.

    :meta hide-value:
    """
    _session = False
    from .settings import settings

    def __init__(self, file, *args, **kwds):
        settings = Pickler.settings
        # pull out the dill-specific options before the stock init sees them
        _byref = kwds.pop('byref', None)
        #_strictio = kwds.pop('strictio', None)
        _fmode = kwds.pop('fmode', None)
        _recurse = kwds.pop('recurse', None)
        StockPickler.__init__(self, file, *args, **kwds)
        self._main = _main_module
        self._diff_cache = {}
        # unset options fall back to the module-level settings
        self._byref = settings['byref'] if _byref is None else _byref
        self._strictio = False #_strictio
        self._fmode = settings['fmode'] if _fmode is None else _fmode
        self._recurse = settings['recurse'] if _recurse is None else _recurse
        self._postproc = OrderedDict()
        self._file = file

    def save(self, obj, save_persistent_id=True):
        # numpy hack: lazily register reducers the first time an unknown
        # numpy-derived type is saved
        obj_type = type(obj)
        if NumpyArrayType and not (obj_type is type or obj_type in Pickler.dispatch):
            # register if the object is a numpy ufunc
            # thanks to Paul Kienzle for pointing out ufuncs didn't pickle
            if numpyufunc(obj_type):
                @register(obj_type)
                def save_numpy_ufunc(pickler, obj):
                    logger.trace(pickler, "Nu: %s", obj)
                    name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
                    StockPickler.save_global(pickler, obj, name=name)
                    logger.trace(pickler, "# Nu")
                    return
                # NOTE: the above 'save' performs like:
                #   import copy_reg
                #   def udump(f): return f.__name__
                #   def uload(name): return getattr(numpy, name)
                #   copy_reg.pickle(NumpyUfuncType, udump, uload)
            # register if the object is a numpy dtype
            if numpydtype(obj_type):
                @register(obj_type)
                def save_numpy_dtype(pickler, obj):
                    logger.trace(pickler, "Dt: %s", obj)
                    pickler.save_reduce(_create_dtypemeta, (obj.type,), obj=obj)
                    logger.trace(pickler, "# Dt")
                    return
                # NOTE: the above 'save' performs like:
                #   import copy_reg
                #   def uload(name): return type(NumpyDType(name))
                #   def udump(f): return uload, (f.type,)
                #   copy_reg.pickle(NumpyDTypeType, udump, uload)
            # register if the object is a subclassed numpy array instance
            if ndarraysubclassinstance(obj_type):
                @register(obj_type)
                def save_numpy_array(pickler, obj):
                    logger.trace(pickler, "Nu: (%s, %s)", obj.shape, obj.dtype)
                    npdict = getattr(obj, '__dict__', None)
                    f, args, state = obj.__reduce__()
                    pickler.save_reduce(_create_array, (f,args,state,npdict), obj=obj)
                    logger.trace(pickler, "# Nu")
                    return
        # end numpy hack

        if GENERATOR_FAIL and obj_type is GeneratorType:
            msg = "Can't pickle %s: attribute lookup builtins.generator failed" % GeneratorType
            raise PicklingError(msg)
        StockPickler.save(self, obj, save_persistent_id)

    save.__doc__ = StockPickler.save.__doc__

    def dump(self, obj): #NOTE: if settings change, need to update attributes
        logger.trace_setup(self)
        StockPickler.dump(self, obj)
    dump.__doc__ = StockPickler.dump.__doc__

431 

class Unpickler(StockUnpickler):
    """python's Unpickler extended to interpreter sessions and more types"""
    from .settings import settings
    _session = False

    def find_class(self, module, name):
        # special-case lookups the stock unpickler cannot resolve
        if (module, name) == ('__builtin__', '__main__'):
            return self._main.__dict__ #XXX: above set w/save_module_dict
        elif (module, name) == ('__builtin__', 'NoneType'):
            return type(None) #XXX: special case: NoneType missing
        if module == 'dill.dill':
            # legacy module path from old dill versions
            module = 'dill._dill'
        return StockUnpickler.find_class(self, module, name)

    def __init__(self, *args, **kwds):
        settings = Pickler.settings
        _ignore = kwds.pop('ignore', None)
        StockUnpickler.__init__(self, *args, **kwds)
        self._main = _main_module
        self._ignore = settings['ignore'] if _ignore is None else _ignore

    def load(self): #NOTE: if settings change, need to update attributes
        obj = StockUnpickler.load(self)
        if not self._ignore and \
                type(obj).__module__ == getattr(_main_module, '__name__', '__main__'):
            # point obj class to main
            try: obj.__class__ = getattr(self._main, type(obj).__name__)
            except (AttributeError,TypeError): pass # defined in a file
        #_main_module.__dict__.update(obj.__dict__) #XXX: should update globals ?
        return obj
    load.__doc__ = StockUnpickler.load.__doc__

463 

464''' 

465def dispatch_table(): 

466 """get the dispatch table of registered types""" 

467 return Pickler.dispatch 

468''' 

469 

470pickle_dispatch_copy = StockPickler.dispatch.copy() 

471 

def pickle(t, func):
    """expose :attr:`~Pickler.dispatch` table for user-created extensions"""
    Pickler.dispatch[t] = func

476 

def register(t):
    """decorator to register types to Pickler's :attr:`~Pickler.dispatch` table"""
    def _bind(func):
        # record the reducer and hand the function back unchanged
        Pickler.dispatch[t] = func
        return func
    return _bind

483 

484def _revert_extension(): 

485 """drop dill-registered types from pickle's dispatch table""" 

486 for type, func in list(StockPickler.dispatch.items()): 

487 if func.__module__ == __name__: 

488 del StockPickler.dispatch[type] 

489 if type in pickle_dispatch_copy: 

490 StockPickler.dispatch[type] = pickle_dispatch_copy[type] 

491 

def use_diff(on=True):
    """
    Reduces size of pickles by only including object which have changed.

    Decreases pickle size but increases CPU time needed.
    Also helps avoid some unpickleable objects.
    MUST be called at start of script, otherwise changes will not be recorded.
    """
    global _use_diff, diff
    _use_diff = on
    if _use_diff and diff is None:
        # import the diff module lazily, preferring the package-local copy
        try:
            from . import diff as _diff_module
        except ImportError:
            import diff as _diff_module
        diff = _diff_module

508 

509def _create_typemap(): 

510 import types 

511 d = dict(list(__builtin__.__dict__.items()) + \ 

512 list(types.__dict__.items())).items() 

513 for key, value in d: 

514 if getattr(value, '__module__', None) == 'builtins' \ 

515 and type(value) is type: 

516 yield key, value 

517 return 

518_reverse_typemap = dict(_create_typemap()) 

519_reverse_typemap.update({ 

520 'PartialType': PartialType, 

521 'SuperType': SuperType, 

522 'ItemGetterType': ItemGetterType, 

523 'AttrGetterType': AttrGetterType, 

524}) 

525if sys.hexversion < 0x30800a2: 

526 _reverse_typemap.update({ 

527 'CellType': CellType, 

528 }) 

529 

530# "Incidental" implementation specific types. Unpickling these types in another 

531# implementation of Python (PyPy -> CPython) is not guaranteed to work 

532 

533# This dictionary should contain all types that appear in Python implementations 

534# but are not defined in https://docs.python.org/3/library/types.html#standard-interpreter-types 

535x=OrderedDict() 

536_incedental_reverse_typemap = { 

537 'FileType': FileType, 

538 'BufferedRandomType': BufferedRandomType, 

539 'BufferedReaderType': BufferedReaderType, 

540 'BufferedWriterType': BufferedWriterType, 

541 'TextWrapperType': TextWrapperType, 

542 'PyBufferedRandomType': PyBufferedRandomType, 

543 'PyBufferedReaderType': PyBufferedReaderType, 

544 'PyBufferedWriterType': PyBufferedWriterType, 

545 'PyTextWrapperType': PyTextWrapperType, 

546} 

547 

548_incedental_reverse_typemap.update({ 

549 "DictKeysType": type({}.keys()), 

550 "DictValuesType": type({}.values()), 

551 "DictItemsType": type({}.items()), 

552 

553 "OdictKeysType": type(x.keys()), 

554 "OdictValuesType": type(x.values()), 

555 "OdictItemsType": type(x.items()), 

556}) 

557 

558if ExitType: 

559 _incedental_reverse_typemap['ExitType'] = ExitType 

560if InputType: 

561 _incedental_reverse_typemap['InputType'] = InputType 

562 _incedental_reverse_typemap['OutputType'] = OutputType 

563 

564''' 

565try: 

566 import symtable 

567 _incedental_reverse_typemap["SymtableEntryType"] = type(symtable.symtable("", "string", "exec")._table) 

568except: #FIXME: fails to pickle 

569 pass 

570 

571if sys.hexversion >= 0x30a00a0: 

572 _incedental_reverse_typemap['LineIteratorType'] = type(compile('3', '', 'eval').co_lines()) 

573''' 

574 

575if sys.hexversion >= 0x30b00b0 and not IS_PYPY: 

576 from types import GenericAlias 

577 _incedental_reverse_typemap["GenericAliasIteratorType"] = type(iter(GenericAlias(list, (int,)))) 

578 ''' 

579 _incedental_reverse_typemap['PositionsIteratorType'] = type(compile('3', '', 'eval').co_positions()) 

580 ''' 

581 

582try: 

583 import winreg 

584 _incedental_reverse_typemap["HKEYType"] = winreg.HKEYType 

585except ImportError: 

586 pass 

587 

588_reverse_typemap.update(_incedental_reverse_typemap) 

589_incedental_types = set(_incedental_reverse_typemap.values()) 

590 

591del x 

592 

593_typemap = dict((v, k) for k, v in _reverse_typemap.items()) 

594 

595def _unmarshal(string): 

596 return marshal.loads(string) 

597 

def _load_type(name):
    """Look up a type by its dill name in the reverse typemap."""
    return _reverse_typemap[name]

600 

601def _create_type(typeobj, *args): 

602 return typeobj(*args) 

603 

604def _create_function(fcode, fglobals, fname=None, fdefaults=None, 

605 fclosure=None, fdict=None, fkwdefaults=None): 

606 # same as FunctionType, but enable passing __dict__ to new function, 

607 # __dict__ is the storehouse for attributes added after function creation 

608 func = FunctionType(fcode, fglobals or dict(), fname, fdefaults, fclosure) 

609 if fdict is not None: 

610 func.__dict__.update(fdict) #XXX: better copy? option to copy? 

611 if fkwdefaults is not None: 

612 func.__kwdefaults__ = fkwdefaults 

613 # 'recurse' only stores referenced modules/objects in fglobals, 

614 # thus we need to make sure that we have __builtins__ as well 

615 if "__builtins__" not in func.__globals__: 

616 func.__globals__["__builtins__"] = globals()["__builtins__"] 

617 # assert id(fglobals) == id(func.__globals__) 

618 return func 

619 

class match:
    """
    Make available a limited structural pattern matching-like syntax for Python < 3.10

    Patterns can be only tuples (without types) currently.
    Inspired by the package pattern-matching-PEP634.

    Usage:
    >>> with match(args) as m:
    >>>     if m.case(('x', 'y')):
    >>>         # use m.x and m.y
    >>>     elif m.case(('x', 'y', 'z')):
    >>>         # use m.x, m.y and m.z

    Equivalent native code for Python >= 3.10:
    >>> match args:
    >>>     case (x, y):
    >>>         # use x and y
    >>>     case (x, y, z):
    >>>         # use x, y and z
    """
    def __init__(self, value):
        self.value = value
        self._fields = None
    def __enter__(self):
        return self
    def __exit__(self, *exc_info):
        return False
    def case(self, args): # *args, **kwargs):
        """just handles tuple patterns"""
        if len(args) != len(self.value): # + len(kwargs):
            return False
        #if not all(isinstance(arg, pat) for arg, pat in zip(self.value[len(args):], kwargs.values())):
        #    return False
        self.args = args # (*args, *kwargs)
        return True
    @property
    def fields(self):
        # Bind names to values lazily, only on first attribute access.
        if self._fields is None:
            self._fields = dict(zip(self.args, self.value))
        return self._fields
    def __getattr__(self, item):
        return self.fields[item]

664 

665ALL_CODE_PARAMS = [ 

666 # Version New attribute CodeType parameters 

667 ((3,11,'a'), 'co_endlinetable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable endlinetable columntable exceptiontable freevars cellvars'), 

668 ((3,11), 'co_exceptiontable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable exceptiontable freevars cellvars'), 

669 ((3,11,'p'), 'co_qualname', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable freevars cellvars'), 

670 ((3,10), 'co_linetable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno linetable freevars cellvars'), 

671 ((3,8), 'co_posonlyargcount', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno lnotab freevars cellvars'), 

672 ((3,7), 'co_kwonlyargcount', 'argcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno lnotab freevars cellvars'), 

673 ] 

674for version, new_attr, params in ALL_CODE_PARAMS: 

675 if hasattr(CodeType, new_attr): 

676 CODE_VERSION = version 

677 CODE_PARAMS = params.split() 

678 break 

679ENCODE_PARAMS = set(CODE_PARAMS).intersection( 

680 ['code', 'lnotab', 'linetable', 'endlinetable', 'columntable', 'exceptiontable']) 

681 

def _create_code(*args):
    """Reconstruct a CodeType from pickled members, across Python versions.

    The incoming *args* layout depends on the Python version that produced
    the pickle; each tuple pattern below corresponds to one known layout.
    When the layout matches the running interpreter's CodeType signature the
    object is built directly; otherwise the members are translated by name.
    """
    def enc(x):
        # stored str members may need re-encoding back to bytes
        return x.encode() if hasattr(x, 'encode') else x

    if not isinstance(args[0], int): # co_lnotab stored from >= 3.10
        LNOTAB, *args = args
    else: # from < 3.10 (or pre-LNOTAB storage)
        LNOTAB = b''

    with match(args) as m:
        # Python 3.11/3.12a (18 members)
        if m.case((
            'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname', 'firstlineno', # args[6:14]
            'linetable', 'exceptiontable', 'freevars', 'cellvars' # args[14:]
        )):
            if CODE_VERSION == (3,11):
                return CodeType(
                    *args[:6],
                    enc(args[6]),  # code
                    *args[7:14],
                    enc(args[14]), # linetable
                    enc(args[15]), # exceptiontable
                    args[16],
                    args[17],
                )
            fields = m.fields
        # PyPy 3.11 7.3.19+ (17 members)
        elif m.case((
            'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname', # args[6:13]
            'firstlineno', 'linetable', 'freevars', 'cellvars' # args[13:]
        )):
            if CODE_VERSION == (3,11,'p'):
                return CodeType(
                    *args[:6],
                    enc(args[6]),  # code
                    *args[7:14],
                    enc(args[14]), # linetable
                    args[15],
                    args[16],
                )
            fields = m.fields
        # Python 3.10 or 3.8/3.9 (16 members)
        elif m.case((
            'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'firstlineno', # args[6:13]
            'LNOTAB_OR_LINETABLE', 'freevars', 'cellvars' # args[13:]
        )):
            if CODE_VERSION in ((3,10), (3,8)):
                return CodeType(
                    *args[:6],
                    enc(args[6]),  # code
                    *args[7:13],
                    enc(args[13]), # lnotab/linetable
                    args[14],
                    args[15],
                )
            fields = m.fields
            # the ambiguous member is lnotab on <= 3.9, linetable on >= 3.10
            if CODE_VERSION >= (3,10):
                fields['linetable'] = m.LNOTAB_OR_LINETABLE
            else:
                fields['lnotab'] = LNOTAB if LNOTAB else m.LNOTAB_OR_LINETABLE
        # Python 3.7 (15 args)
        elif m.case((
            'argcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:5]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'firstlineno', # args[5:12]
            'lnotab', 'freevars', 'cellvars' # args[12:]
        )):
            if CODE_VERSION == (3,7):
                return CodeType(
                    *args[:5],
                    enc(args[5]),  # code
                    *args[6:12],
                    enc(args[12]), # lnotab
                    args[13],
                    args[14],
                )
            fields = m.fields
        # Python 3.11a (20 members)
        elif m.case((
            'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname', 'firstlineno', # args[6:14]
            'linetable', 'endlinetable', 'columntable', 'exceptiontable', 'freevars', 'cellvars' # args[14:]
        )):
            if CODE_VERSION == (3,11,'a'):
                return CodeType(
                    *args[:6],
                    enc(args[6]),  # code
                    *args[7:14],
                    *(enc(a) for a in args[14:18]), # linetable-exceptiontable
                    args[18],
                    args[19],
                )
            fields = m.fields
        else:
            raise UnpicklingError("pattern match for code object failed")

    # The args format doesn't match this version: fill in the members this
    # interpreter expects that the pickling version did not provide.
    fields.setdefault('posonlyargcount', 0)         # from python <= 3.7
    fields.setdefault('lnotab', LNOTAB)             # from python >= 3.10
    fields.setdefault('linetable', b'')             # from python <= 3.9
    fields.setdefault('qualname', fields['name'])   # from python <= 3.10
    fields.setdefault('exceptiontable', b'')        # from python <= 3.10
    fields.setdefault('endlinetable', None)         # from python != 3.11a
    fields.setdefault('columntable', None)          # from python != 3.11a

    return CodeType(*(enc(fields[k]) if k in ENCODE_PARAMS else fields[k]
                      for k in CODE_PARAMS))

789 

790def _create_ftype(ftypeobj, func, args, kwds): 

791 if kwds is None: 

792 kwds = {} 

793 if args is None: 

794 args = () 

795 return ftypeobj(func, *args, **kwds) 

796 

797def _create_typing_tuple(argz, *args): #NOTE: workaround python/cpython#94245 

798 if not argz: 

799 return typing.Tuple[()].copy_with(()) 

800 if argz == ((),): 

801 return typing.Tuple[()] 

802 return typing.Tuple[argz] 

803 

# threading's private _ThreadHandle only exists on newer CPython versions
# (ThreadHandleType is falsy elsewhere), so guard the reconstructor.
if ThreadHandleType:
    def _create_thread_handle(ident, done, *args): #XXX: ignores 'blocking'
        """Recreate a private threading thread-handle for thread id *ident*.

        *done* marks the handle as finished; extra *args* are accepted for
        forward compatibility and ignored.
        """
        from threading import _make_thread_handle
        handle = _make_thread_handle(ident)
        if done:
            handle._set_done()
        return handle

811 

812def _create_lock(locked, *args): #XXX: ignores 'blocking' 

813 from threading import Lock 

814 lock = Lock() 

815 if locked: 

816 if not lock.acquire(False): 

817 raise UnpicklingError("Cannot acquire lock") 

818 return lock 

819 

def _create_rlock(count, owner, *args): #XXX: ignores 'blocking'
    """Recreate an RLock, restoring its recursion count and owning thread."""
    rlock = RLockType()
    if owner is not None:
        # restore the saved (count, owner) pair without blocking
        rlock._acquire_restore((count, owner))
    if owner and not rlock._is_owned():
        raise UnpicklingError("Cannot acquire lock")
    return rlock

827 

828# thanks to matsjoyce for adding all the different file modes 

def _create_filehandle(name, mode, position, closed, open, strictio, fmode, fdata): # buffering=0
    """Recreate a file handle from its pickled description.

    *fmode* selects the strategy: FILE_FMODE rewrites *fdata* to disk,
    CONTENTS_FMODE reopens without truncating, otherwise only the handle
    (name/mode/position) is restored.  *strictio* turns recoverable
    mismatches (missing file, short file) into exceptions.  The *open*
    parameter is the opener saved at pickling time (shadows the builtin).
    """
    # only pickles the handle, not the file contents... good? or StringIO(data)?
    # (for file contents see: http://effbot.org/librarybook/copy-reg.htm)
    # NOTE: handle special cases first (are there more special cases?)
    names = {'<stdin>':sys.__stdin__, '<stdout>':sys.__stdout__,
             '<stderr>':sys.__stderr__} #XXX: better fileno=(0,1,2) ?
    if name in list(names.keys()):
        f = names[name] #XXX: safer "f=sys.stdin"
    elif name == '<tmpfile>':
        # NOTE(review): os.tmpfile() is a Python-2 API; this branch appears
        # to be legacy-pickle support and would fail on Python 3 -- confirm
        f = os.tmpfile()
    elif name == '<fdopen>':
        import tempfile
        f = tempfile.TemporaryFile(mode)
    else:
        try:
            exists = os.path.exists(name)
        except Exception:
            exists = False
        if not exists:
            if strictio:
                raise FileNotFoundError("[Errno 2] No such file or directory: '%s'" % name)
            elif "r" in mode and fmode != FILE_FMODE:
                # can't read a missing file; fall back to a temporary file
                name = '<fdopen>' # or os.devnull?
            current_size = 0 # or maintain position?
        else:
            current_size = os.path.getsize(name)

        if position > current_size:
            if strictio:
                raise ValueError("invalid buffer size")
            elif fmode == CONTENTS_FMODE:
                # clamp the cursor to the actual end of the file
                position = current_size
        # try to open the file by name
        # NOTE: has different fileno
        try:
            #FIXME: missing: *buffering*, encoding, softspace
            if fmode == FILE_FMODE:
                # restore the saved contents, then reopen in the saved mode
                f = open(name, mode if "w" in mode else "w")
                f.write(fdata)
                if "w" not in mode:
                    f.close()
                    f = open(name, mode)
            elif name == '<fdopen>': # file did not exist
                import tempfile
                f = tempfile.TemporaryFile(mode)
            # treat x mode as w mode
            elif fmode == CONTENTS_FMODE \
               and ("w" in mode or "x" in mode):
                # stop truncation when opening
                flags = os.O_CREAT
                if "+" in mode:
                    flags |= os.O_RDWR
                else:
                    flags |= os.O_WRONLY
                f = os.fdopen(os.open(name, flags), mode)
                # set name to the correct value
                r = getattr(f, "buffer", f)
                r = getattr(r, "raw", r)
                r.name = name
                assert f.name == name
            else:
                f = open(name, mode)
        except (IOError, FileNotFoundError):
            err = sys.exc_info()[1]
            raise UnpicklingError(err)
    if closed:
        f.close()
    elif position >= 0 and fmode != HANDLE_FMODE:
        # HANDLE_FMODE keeps the handle's current position on purpose
        f.seek(position)
    return f

899 

900def _create_stringi(value, position, closed): 

901 f = StringIO(value) 

902 if closed: f.close() 

903 else: f.seek(position) 

904 return f 

905 

906def _create_stringo(value, position, closed): 

907 f = StringIO() 

908 if closed: f.close() 

909 else: 

910 f.write(value) 

911 f.seek(position) 

912 return f 

913 

914class _itemgetter_helper(object): 

915 def __init__(self): 

916 self.items = [] 

917 def __getitem__(self, item): 

918 self.items.append(item) 

919 return 

920 

921class _attrgetter_helper(object): 

922 def __init__(self, attrs, index=None): 

923 self.attrs = attrs 

924 self.index = index 

925 def __getattribute__(self, attr): 

926 attrs = object.__getattribute__(self, "attrs") 

927 index = object.__getattribute__(self, "index") 

928 if index is None: 

929 index = len(attrs) 

930 attrs.append(attr) 

931 else: 

932 attrs[index] = ".".join([attrs[index], attr]) 

933 return type(self)(attrs, index) 

934 

935class _dictproxy_helper(dict): 

936 def __ror__(self, a): 

937 return a 

938 

# shared probe instance used by the mapping-proxy unwrapping trick below
_dictproxy_helper_instance = _dictproxy_helper()

__d = {}
try:
    # In CPython 3.9 and later, this trick can be used to exploit the
    # implementation of the __or__ function of MappingProxyType to get the true
    # mapping referenced by the proxy. It may work for other implementations,
    # but is not guaranteed.
    MAPPING_PROXY_TRICK = __d is (DictProxyType(__d) | _dictproxy_helper_instance)
except Exception:
    # `|` unsupported (older Python or alternate implementation): disable it
    MAPPING_PROXY_TRICK = False
del __d

# _CELL_REF and _CELL_EMPTY are used to stay compatible with versions of dill
# whose _create_cell functions do not have a default value.
# _CELL_REF can be safely removed entirely (replaced by empty tuples for calls
# to _create_cell) once breaking changes are allowed.
_CELL_REF = None
_CELL_EMPTY = Sentinel('_CELL_EMPTY')

958 

def _create_cell(contents=None):
    """Recreate a closure cell holding *contents*.

    If *contents* is the ``_CELL_EMPTY`` sentinel, ``value`` is deliberately
    left unassigned so the lambda's closure cell is created empty (reading
    its ``cell_contents`` then raises ValueError).
    """
    if contents is not _CELL_EMPTY:
        value = contents
    return (lambda: value).__closure__[0]

963 

964def _create_weakref(obj, *args): 

965 from weakref import ref 

966 if obj is None: # it's dead 

967 from collections import UserDict 

968 return ref(UserDict(), *args) 

969 return ref(obj, *args) 

970 

971def _create_weakproxy(obj, callable=False, *args): 

972 from weakref import proxy 

973 if obj is None: # it's dead 

974 if callable: return proxy(lambda x:x, *args) 

975 from collections import UserDict 

976 return proxy(UserDict(), *args) 

977 return proxy(obj, *args) 

978 

979def _eval_repr(repr_str): 

980 return eval(repr_str) 

981 

982def _create_array(f, args, state, npdict=None): 

983 #array = numpy.core.multiarray._reconstruct(*args) 

984 array = f(*args) 

985 array.__setstate__(state) 

986 if npdict is not None: # we also have saved state in __dict__ 

987 array.__dict__.update(npdict) 

988 return array 

989 

def _create_dtypemeta(scalar_type):
    """Recreate numpy's dtype metaclass or a concrete dtype subclass.

    ``NumpyDType`` is the placeholder ``True`` until numpy is first needed;
    ``__hook__()`` swaps in the real type lazily.
    """
    if NumpyDType is True: __hook__() # a bit hacky I think
    if scalar_type is None:
        # no scalar given: return the dtype metaclass itself
        return NumpyDType
    return type(NumpyDType(scalar_type))

995 

def _create_namedtuple(name, fieldnames, modulename, defaults=None):
    """Recreate a namedtuple class, preferring the importable original.

    Reusing an importable class keeps identity checks (isinstance, is)
    working across pickling; otherwise an equivalent class is synthesized.
    """
    existing = _import_module(modulename + '.' + name, safe=True)
    if existing is not None:
        return existing
    import collections
    return collections.namedtuple(name, fieldnames, defaults=defaults, module=modulename)

1003 

def _create_capsule(pointer, name, context, destructor):
    """Restore a PyCapsule.

    First try to locate an existing capsule by importing its dotted *name*
    (the safe path); only if that fails is a new capsule built from the raw
    *pointer*, which may reference memory not present in this process.
    """
    attr_found = False
    try:
        # based on https://github.com/python/cpython/blob/f4095e53ab708d95e019c909d5928502775ba68f/Objects/capsule.c#L209-L231
        uname = name.decode('utf8')
        # try progressively shorter module prefixes of the dotted name
        for i in range(1, uname.count('.')+1):
            names = uname.rsplit('.', i)
            try:
                module = __import__(names[0])
            except ImportError:
                # NOTE(review): if the very first import fails, `module` is
                # unbound and the NameError below is swallowed by the outer
                # except -- appears intentional best-effort; confirm
                pass
            obj = module
            for attr in names[1:]:
                obj = getattr(obj, attr)
            capsule = obj
            attr_found = True
            break
    except Exception:
        pass

    if attr_found:
        if _PyCapsule_IsValid(capsule, name):
            return capsule
        raise UnpicklingError("%s object exists at %s but a PyCapsule object was expected." % (type(capsule), name))
    else:
        #warnings.warn('Creating a new PyCapsule %s for a C data structure that may not be present in memory. Segmentation faults or other memory errors are possible.' % (name,), UnpicklingWarning)
        capsule = _PyCapsule_New(pointer, name, destructor)
        _PyCapsule_SetContext(capsule, context)
        return capsule

1033 

1034def _getattr(objclass, name, repr_str): 

1035 # hack to grab the reference directly 

1036 try: #XXX: works only for __builtin__ ? 

1037 attr = repr_str.split("'")[3] 

1038 return eval(attr+'.__dict__["'+name+'"]') 

1039 except Exception: 

1040 try: 

1041 attr = objclass.__dict__ 

1042 if type(attr) is DictProxyType: 

1043 attr = attr[name] 

1044 else: 

1045 attr = getattr(objclass,name) 

1046 except (AttributeError, KeyError): 

1047 attr = getattr(objclass,name) 

1048 return attr 

1049 

1050def _get_attr(self, name): 

1051 # stop recursive pickling 

1052 return getattr(self, name, None) or getattr(__builtin__, name) 

1053 

1054def _import_module(import_name, safe=False): 

1055 try: 

1056 if import_name.startswith('__runtime__.'): 

1057 return sys.modules[import_name] 

1058 elif '.' in import_name: 

1059 items = import_name.split('.') 

1060 module = '.'.join(items[:-1]) 

1061 obj = items[-1] 

1062 submodule = getattr(__import__(module, None, None, [obj]), obj) 

1063 if isinstance(submodule, (ModuleType, type)): 

1064 return submodule 

1065 return __import__(import_name, None, None, [obj]) 

1066 else: 

1067 return __import__(import_name) 

1068 except (ImportError, AttributeError, KeyError): 

1069 if safe: 

1070 return None 

1071 raise 

1072 

1073# https://github.com/python/cpython/blob/a8912a0f8d9eba6d502c37d522221f9933e976db/Lib/pickle.py#L322-L333 

1074def _getattribute(obj, name): 

1075 for subpath in name.split('.'): 

1076 if subpath == '<locals>': 

1077 raise AttributeError("Can't get local attribute {!r} on {!r}" 

1078 .format(name, obj)) 

1079 try: 

1080 parent = obj 

1081 obj = getattr(obj, subpath) 

1082 except AttributeError: 

1083 raise AttributeError("Can't get attribute {!r} on {!r}" 

1084 .format(name, obj)) 

1085 return obj, parent 

1086 

1087def _locate_function(obj, pickler=None): 

1088 module_name = getattr(obj, '__module__', None) 

1089 if module_name in ['__main__', None] or \ 

1090 pickler and is_dill(pickler, child=False) and pickler._session and module_name == pickler._main.__name__: 

1091 return False 

1092 if hasattr(obj, '__qualname__'): 

1093 module = _import_module(module_name, safe=True) 

1094 try: 

1095 found, _ = _getattribute(module, obj.__qualname__) 

1096 return found is obj 

1097 except AttributeError: 

1098 return False 

1099 else: 

1100 found = _import_module(module_name + '.' + obj.__name__, safe=True) 

1101 return found is obj 

1102 

1103 

1104def _setitems(dest, source): 

1105 for k, v in source.items(): 

1106 dest[k] = v 

1107 

1108 

def _save_with_postproc(pickler, reduction, is_pickler_dill=None, obj=Getattr.NO_DEFAULT, postproc_list=None):
    """Pickle *obj* via *reduction*, deferring self-referential fixups.

    While *obj* is being saved, other save functions may append
    (callable, args) pairs to its entry in ``pickler._postproc``; those are
    emitted after the main reduce so reference cycles are closed without
    recursing.  Dictionary updates via ``_setitems`` are special-cased to
    reuse pickle.py's batched SETITEMS machinery.
    """
    if obj is Getattr.NO_DEFAULT:
        obj = Reduce(reduction) # pragma: no cover

    if is_pickler_dill is None:
        is_pickler_dill = is_dill(pickler, child=True)
    if is_pickler_dill:
        # assert id(obj) not in pickler._postproc, str(obj) + ' already pushed on stack!'
        # if not hasattr(pickler, 'x'): pickler.x = 0
        # print(pickler.x*' ', 'push', obj, id(obj), pickler._recurse)
        # pickler.x += 1
        if postproc_list is None:
            postproc_list = []

        # Recursive object not supported. Default to a global instead.
        if id(obj) in pickler._postproc:
            name = '%s.%s ' % (obj.__module__, getattr(obj, '__qualname__', obj.__name__)) if hasattr(obj, '__module__') else ''
            warnings.warn('Cannot pickle %r: %shas recursive self-references that trigger a RecursionError.' % (obj, name), PicklingWarning)
            pickler.save_global(obj)
            return
        pickler._postproc[id(obj)] = postproc_list

    # TODO: Use state_setter in Python 3.8 to allow for faster cPickle implementations
    pickler.save_reduce(*reduction, obj=obj)

    if is_pickler_dill:
        # pickler.x -= 1
        # print(pickler.x*' ', 'pop', obj, id(obj))
        postproc = pickler._postproc.pop(id(obj))
        # assert postproc_list == postproc, 'Stack tampered!'
        # emit deferred fixups in reverse registration order
        for reduction in reversed(postproc):
            if reduction[0] is _setitems:
                # use the internal machinery of pickle.py to speedup when
                # updating a dictionary in postproc
                dest, source = reduction[1]
                if source:
                    # push the memoized destination dict, then batch-set items
                    pickler.write(pickler.get(pickler.memo[id(dest)][0]))
                    if sys.hexversion < 0x30e00a1:
                        pickler._batch_setitems(iter(source.items()))
                    else:
                        # Python >= 3.14.0a1 requires the obj argument
                        pickler._batch_setitems(iter(source.items()), obj=obj)
                else:
                    # Updating with an empty dictionary. Same as doing nothing.
                    continue
            else:
                pickler.save_reduce(*reduction)
            # pop None created by calling preprocessing step off stack
            pickler.write(POP)

1157 

1158#@register(CodeType) 

1159#def save_code(pickler, obj): 

1160# logger.trace(pickler, "Co: %s", obj) 

1161# pickler.save_reduce(_unmarshal, (marshal.dumps(obj),), obj=obj) 

1162# logger.trace(pickler, "# Co") 

1163# return 

1164 

1165# The following function is based on 'save_codeobject' from 'cloudpickle' 

1166# Copyright (c) 2012, Regents of the University of California. 

1167# Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_. 

1168# License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE 

@register(CodeType)
def save_code(pickler, obj):
    """Pickle a code object by capturing its version-specific attributes.

    The attribute tuple layout varies with the interpreter (3.7 through
    3.11, plus PyPy); ``_create_code`` pattern-matches the tuple length and
    field names on the unpickling side.  For interpreters newer than 3.9,
    ``co_lnotab`` is prepended (uncounted) so older Pythons can rebuild
    the object.
    """
    logger.trace(pickler, "Co: %s", obj)
    if hasattr(obj, "co_endlinetable"): # python 3.11a (20 args)
        args = (
            obj.co_lnotab, # for < python 3.10 [not counted in args]
            obj.co_argcount, obj.co_posonlyargcount,
            obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
            obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
            obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname,
            obj.co_firstlineno, obj.co_linetable, obj.co_endlinetable,
            obj.co_columntable, obj.co_exceptiontable, obj.co_freevars,
            obj.co_cellvars
        )
    elif hasattr(obj, "co_exceptiontable"): # python 3.11 (18 args)
        with warnings.catch_warnings():
            if not OLD312a7: # issue 597
                # accessing co_lnotab is deprecated on >= 3.12a7
                warnings.filterwarnings('ignore', category=DeprecationWarning)
            args = (
                obj.co_lnotab, # for < python 3.10 [not counted in args]
                obj.co_argcount, obj.co_posonlyargcount,
                obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
                obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
                obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname,
                obj.co_firstlineno, obj.co_linetable, obj.co_exceptiontable,
                obj.co_freevars, obj.co_cellvars
            )
    elif hasattr(obj, "co_qualname"): # pypy 3.11 7.3.19+ (17 args)
        args = (
            obj.co_lnotab, obj.co_argcount, obj.co_posonlyargcount,
            obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
            obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
            obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname,
            obj.co_firstlineno, obj.co_linetable, obj.co_freevars,
            obj.co_cellvars
        )
    elif hasattr(obj, "co_linetable"): # python 3.10 (16 args)
        args = (
            obj.co_lnotab, # for < python 3.10 [not counted in args]
            obj.co_argcount, obj.co_posonlyargcount,
            obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
            obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
            obj.co_varnames, obj.co_filename, obj.co_name,
            obj.co_firstlineno, obj.co_linetable, obj.co_freevars,
            obj.co_cellvars
        )
    elif hasattr(obj, "co_posonlyargcount"): # python 3.8 (16 args)
        args = (
            obj.co_argcount, obj.co_posonlyargcount,
            obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
            obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
            obj.co_varnames, obj.co_filename, obj.co_name,
            obj.co_firstlineno, obj.co_lnotab, obj.co_freevars,
            obj.co_cellvars
        )
    else: # python 3.7 (15 args)
        args = (
            obj.co_argcount, obj.co_kwonlyargcount, obj.co_nlocals,
            obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts,
            obj.co_names, obj.co_varnames, obj.co_filename,
            obj.co_name, obj.co_firstlineno, obj.co_lnotab,
            obj.co_freevars, obj.co_cellvars
        )

    pickler.save_reduce(_create_code, args, obj=obj)
    logger.trace(pickler, "# Co")
    return

1236 

1237def _repr_dict(obj): 

1238 """Make a short string representation of a dictionary.""" 

1239 return "<%s object at %#012x>" % (type(obj).__name__, id(obj)) 

1240 

@register(dict)
def save_module_dict(pickler, obj):
    """Pickle a dict, special-casing module ``__dict__`` objects.

    Module dictionaries are written as raw GLOBAL opcodes referencing the
    module (D1: session main, D3: __main__ from a stock pickler, D4: any
    importable module) so they are rebound on load instead of copied; all
    other dicts (D2) go through the stock save_dict.
    """
    if is_dill(pickler, child=False) and obj == pickler._main.__dict__ and \
            not (pickler._session and pickler._first_pass):
        logger.trace(pickler, "D1: %s", _repr_dict(obj)) # obj
        # raw GLOBAL opcode: load __main__ by name on unpickling
        pickler.write(bytes('c__builtin__\n__main__\n', 'UTF-8'))
        logger.trace(pickler, "# D1")
    elif (not is_dill(pickler, child=False)) and (obj == _main_module.__dict__):
        logger.trace(pickler, "D3: %s", _repr_dict(obj)) # obj
        pickler.write(bytes('c__main__\n__dict__\n', 'UTF-8')) #XXX: works in general?
        logger.trace(pickler, "# D3")
    elif '__name__' in obj and obj != _main_module.__dict__ \
            and type(obj['__name__']) is str \
            and obj is getattr(_import_module(obj['__name__'],True), '__dict__', None):
        logger.trace(pickler, "D4: %s", _repr_dict(obj)) # obj
        pickler.write(bytes('c%s\n__dict__\n' % obj['__name__'], 'UTF-8'))
        logger.trace(pickler, "# D4")
    else:
        logger.trace(pickler, "D2: %s", _repr_dict(obj)) # obj
        if is_dill(pickler, child=False) and pickler._session:
            # we only care about session the first pass thru
            pickler._first_pass = False
        StockPickler.save_dict(pickler, obj)
        logger.trace(pickler, "# D2")
    return

1266 

1267 

# Two strategies for pickling dict views (keys/values/items):
#  - with the mapping-proxy trick (3.10+), recover the view's source
#    mapping directly and rebuild the same view from it;
#  - otherwise (cloudpickle-derived), rebuild a standalone dict that
#    reproduces the view's contents and take the view of that.
if not OLD310 and MAPPING_PROXY_TRICK:
    def save_dict_view(dicttype):
        def save_dict_view_for_function(func):
            def _save_dict_view(pickler, obj):
                logger.trace(pickler, "Dkvi: <%s>", obj)
                # unwrap obj.mapping (a MappingProxyType) to the real dict
                mapping = obj.mapping | _dictproxy_helper_instance
                pickler.save_reduce(func, (mapping,), obj=obj)
                logger.trace(pickler, "# Dkvi")
            return _save_dict_view
        return [
            (funcname, save_dict_view_for_function(getattr(dicttype, funcname)))
            for funcname in ('keys', 'values', 'items')
        ]
else:
    # The following functions are based on 'cloudpickle'
    # https://github.com/cloudpipe/cloudpickle/blob/5d89947288a18029672596a4d719093cc6d5a412/cloudpickle/cloudpickle.py#L922-L940
    # Copyright (c) 2012, Regents of the University of California.
    # Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
    # License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE
    def save_dict_view(dicttype):
        def save_dict_keys(pickler, obj):
            logger.trace(pickler, "Dk: <%s>", obj)
            # values are lost: rebuild a dict with the same keys (-> None)
            dict_constructor = _shims.Reduce(dicttype.fromkeys, (list(obj),))
            pickler.save_reduce(dicttype.keys, (dict_constructor,), obj=obj)
            logger.trace(pickler, "# Dk")

        def save_dict_values(pickler, obj):
            logger.trace(pickler, "Dv: <%s>", obj)
            # keys are lost: use enumerate() indices as stand-in keys
            dict_constructor = _shims.Reduce(dicttype, (enumerate(obj),))
            pickler.save_reduce(dicttype.values, (dict_constructor,), obj=obj)
            logger.trace(pickler, "# Dv")

        def save_dict_items(pickler, obj):
            logger.trace(pickler, "Di: <%s>", obj)
            pickler.save_reduce(dicttype.items, (dicttype(obj),), obj=obj)
            logger.trace(pickler, "# Di")

        return (
            ('keys', save_dict_keys),
            ('values', save_dict_values),
            ('items', save_dict_items)
        )

1310 

# Register the view savers for each dict type's concrete view classes,
# without clobbering any dispatch entry that already exists (dict and
# OrderedDict share view types on some implementations).
for __dicttype in (
    dict,
    OrderedDict
):
    __obj = __dicttype()
    for __funcname, __savefunc in save_dict_view(__dicttype):
        # the concrete view type, e.g. type({}.keys())
        __tview = type(getattr(__obj, __funcname)())
        if __tview not in Pickler.dispatch:
            Pickler.dispatch[__tview] = __savefunc
del __dicttype, __obj, __funcname, __tview, __savefunc

1321 

1322 

@register(ClassType)
def save_classobj(pickler, obj): #FIXME: enable pickler._byref
    """Pickle a class: by value (C1) when it is not importable from its
    declared module, otherwise by global reference (C2)."""
    if not _locate_function(obj, pickler):
        logger.trace(pickler, "C1: %s", obj)
        pickler.save_reduce(ClassType, (obj.__name__, obj.__bases__,
                                        obj.__dict__), obj=obj)
        #XXX: or obj.__dict__.copy()), obj=obj) ?
        logger.trace(pickler, "# C1")
    else:
        logger.trace(pickler, "C2: %s", obj)
        name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
        StockPickler.save_global(pickler, obj, name=name)
        logger.trace(pickler, "# C2")
    return

1337 

@register(typing._GenericAlias)
def save_generic_alias(pickler, obj):
    """Pickle a typing generic alias.

    Ga0: alias reducible to a bare name -> save as a global.
    Ga1: Tuple[()]/bare Tuple -> rebuilt via _create_typing_tuple
         (workaround for python/cpython#94245).
    Ga2: everything else -> the alias's own __reduce__.
    """
    args = obj.__args__
    if type(obj.__reduce__()) is str:
        logger.trace(pickler, "Ga0: %s", obj)
        StockPickler.save_global(pickler, obj, name=obj.__reduce__())
        logger.trace(pickler, "# Ga0")
    elif obj.__origin__ is tuple and (not args or args == ((),)):
        logger.trace(pickler, "Ga1: %s", obj)
        pickler.save_reduce(_create_typing_tuple, (args,), obj=obj)
        logger.trace(pickler, "# Ga1")
    else:
        logger.trace(pickler, "Ga2: %s", obj)
        StockPickler.save_reduce(pickler, *obj.__reduce__(), obj=obj)
        logger.trace(pickler, "# Ga2")
    return

1354 

# only available where threading exposes the private thread-handle type
if ThreadHandleType:
    @register(ThreadHandleType)
    def save_thread_handle(pickler, obj):
        """Pickle a thread handle by its (ident, done) state only."""
        logger.trace(pickler, "Th: %s", obj)
        pickler.save_reduce(_create_thread_handle, (obj.ident, obj.is_done()), obj=obj)
        logger.trace(pickler, "# Th")
        return

1362 

@register(LockType) #XXX: copied Thread will have new Event (due to new Lock)
def save_lock(pickler, obj):
    """Pickle a threading.Lock by its locked/unlocked state only."""
    logger.trace(pickler, "Lo: %s", obj)
    pickler.save_reduce(_create_lock, (obj.locked(),), obj=obj)
    logger.trace(pickler, "# Lo")
    return

1369 

@register(RLockType)
def save_rlock(pickler, obj):
    """Pickle an RLock by scraping count/owner from its repr.

    The repr is parsed (rather than calling _release_save) so the lock's
    state is left untouched.
    """
    logger.trace(pickler, "RL: %s", obj)
    rep = obj.__repr__() # don't use _release_save as it unlocks the lock
    count_token = rep.partition('count=')[2].split()[0]
    count = int(count_token.rstrip('>'))
    owner = int(rep.partition('owner=')[2].split()[0])
    pickler.save_reduce(_create_rlock, (count,owner,), obj=obj)
    logger.trace(pickler, "# RL")
    return

1379 

1380#@register(SocketType) #FIXME: causes multiprocess test_pickling FAIL 

def save_socket(pickler, obj):
    """Pickle a socket via multiprocessing's reduce_socket.

    Not registered: the @register(SocketType) decorator is disabled above
    because it broke multiprocess test_pickling.
    """
    logger.trace(pickler, "So: %s", obj)
    pickler.save_reduce(*reduce_socket(obj))
    logger.trace(pickler, "# So")
    return

1386 

def _save_file(pickler, obj, open_):
    """Common implementation for pickling file-like handles.

    Captures (name, mode, position, closed) plus, under FILE_FMODE, the
    full file contents; *open_* is the opener to embed in the pickle so
    the handle can be reopened on load.
    """
    if obj.closed:
        position = 0
    else:
        obj.flush()
        # the standard streams are not seekable; -1 means "don't seek"
        if obj in (sys.__stdout__, sys.__stderr__, sys.__stdin__):
            position = -1
        else:
            position = obj.tell()
    if is_dill(pickler, child=True) and pickler._fmode == FILE_FMODE:
        # FILE_FMODE embeds the file's contents in the pickle
        f = open_(obj.name, "r")
        fdata = f.read()
        f.close()
    else:
        fdata = ""
    if is_dill(pickler, child=True):
        strictio = pickler._strictio
        fmode = pickler._fmode
    else:
        # stock pickler: fall back to handle-only semantics
        strictio = False
        fmode = 0 # HANDLE_FMODE
    pickler.save_reduce(_create_filehandle, (obj.name, obj.mode, position,
                                             obj.closed, open_, strictio,
                                             fmode, fdata), obj=obj)
    return

1412 

1413 

@register(FileType) #XXX: in 3.x has buffer=0, needs different _create?
@register(BufferedReaderType)
@register(BufferedWriterType)
@register(TextWrapperType)
def save_file(pickler, obj):
    """Pickle a C-implemented file handle using the builtin open."""
    logger.trace(pickler, "Fi: %s", obj)
    f = _save_file(pickler, obj, open)
    logger.trace(pickler, "# Fi")
    return f

1423 

# BufferedRandom may be absent on some implementations; register separately
if BufferedRandomType:
    @register(BufferedRandomType)
    def save_file(pickler, obj):
        """Pickle a BufferedRandom handle using the builtin open."""
        logger.trace(pickler, "Fi: %s", obj)
        f = _save_file(pickler, obj, open)
        logger.trace(pickler, "# Fi")
        return f

1431 

# pure-python io (_pyio) handle types, when distinct from the C versions;
# these must be reopened with _pyio.open, not the builtin
if PyTextWrapperType:
    @register(PyBufferedReaderType)
    @register(PyBufferedWriterType)
    @register(PyTextWrapperType)
    def save_file(pickler, obj):
        """Pickle a _pyio file handle using _pyio's open."""
        logger.trace(pickler, "Fi: %s", obj)
        f = _save_file(pickler, obj, _open)
        logger.trace(pickler, "# Fi")
        return f

    if PyBufferedRandomType:
        @register(PyBufferedRandomType)
        def save_file(pickler, obj):
            """Pickle a _pyio BufferedRandom handle using _pyio's open."""
            logger.trace(pickler, "Fi: %s", obj)
            f = _save_file(pickler, obj, _open)
            logger.trace(pickler, "# Fi")
            return f

1449 

1450 

1451# The following two functions are based on 'saveCStringIoInput' 

1452# and 'saveCStringIoOutput' from spickle 

1453# Copyright (c) 2011 by science+computing ag 

1454# License: http://www.apache.org/licenses/LICENSE-2.0 

# legacy cStringIO input/output types (absent on modern CPython)
if InputType:
    @register(InputType)
    def save_stringi(pickler, obj):
        """Pickle an input StringIO as (contents, cursor, closed)."""
        logger.trace(pickler, "Io: %s", obj)
        if obj.closed:
            # contents are unreadable once closed; save an empty stream
            value = ''; position = 0
        else:
            value = obj.getvalue(); position = obj.tell()
        pickler.save_reduce(_create_stringi, (value, position, \
                                              obj.closed), obj=obj)
        logger.trace(pickler, "# Io")
        return

    @register(OutputType)
    def save_stringo(pickler, obj):
        """Pickle an output StringIO as (contents, cursor, closed)."""
        logger.trace(pickler, "Io: %s", obj)
        if obj.closed:
            value = ''; position = 0
        else:
            value = obj.getvalue(); position = obj.tell()
        pickler.save_reduce(_create_stringo, (value, position, \
                                              obj.closed), obj=obj)
        logger.trace(pickler, "# Io")
        return

1479 

if LRUCacheType is not None:
    from functools import lru_cache
    @register(LRUCacheType)
    def save_lru_cache(pickler, obj):
        """Pickle an lru_cache wrapper by re-wrapping its target function.

        The cached entries themselves are not pickled, only the cache
        configuration (maxsize, and on 3.9+ the typed flag).
        """
        logger.trace(pickler, "LRU: %s", obj)
        if OLD39:
            # cache_parameters() doesn't exist before 3.9; 'typed' is lost
            kwargs = obj.cache_info()
            args = (kwargs.maxsize,)
        else:
            kwargs = obj.cache_parameters()
            args = (kwargs['maxsize'], kwargs['typed'])
        if args != lru_cache.__defaults__:
            # non-default configuration: pickle a parameterized decorator
            wrapper = Reduce(lru_cache, args, is_callable=True)
        else:
            wrapper = lru_cache
        pickler.save_reduce(wrapper, (obj.__wrapped__,), obj=obj)
        logger.trace(pickler, "# LRU")
        return

1498 

@register(SuperType)
def save_super(pickler, obj):
    """Pickle a bound super object from its (__thisclass__, __self__) pair."""
    logger.trace(pickler, "Su: %s", obj)
    pickler.save_reduce(super, (obj.__thisclass__, obj.__self__), obj=obj)
    logger.trace(pickler, "# Su")
    return

1505 

if IS_PYPY:
    @register(MethodType)
    def save_instancemethod0(pickler, obj):
        """Pickle a bound method; PyPy builtins need a getattr-based path."""
        code = getattr(obj.__func__, '__code__', None)
        # builtin (non-Python) code that round-trips through getattr:
        # save as getattr(self, name) instead of MethodType(func, self)
        if code is not None and type(code) is not CodeType \
                and getattr(obj.__self__, obj.__name__) == obj:
            # Some PyPy builtin functions have no module name
            logger.trace(pickler, "Me2: %s", obj)
            # TODO: verify that this works for all PyPy builtin methods
            pickler.save_reduce(getattr, (obj.__self__, obj.__name__), obj=obj)
            logger.trace(pickler, "# Me2")
            return

        logger.trace(pickler, "Me1: %s", obj)
        pickler.save_reduce(MethodType, (obj.__func__, obj.__self__), obj=obj)
        logger.trace(pickler, "# Me1")
        return
else:
    @register(MethodType)
    def save_instancemethod0(pickler, obj):
        """Pickle a bound method from its (__func__, __self__) pair."""
        logger.trace(pickler, "Me1: %s", obj)
        pickler.save_reduce(MethodType, (obj.__func__, obj.__self__), obj=obj)
        logger.trace(pickler, "# Me1")
        return

1530 

# descriptor types differ by implementation; PyPy only exposes a subset
if not IS_PYPY:
    @register(MemberDescriptorType)
    @register(GetSetDescriptorType)
    @register(MethodDescriptorType)
    @register(WrapperDescriptorType)
    @register(ClassMethodDescriptorType)
    def save_wrapper_descriptor(pickler, obj):
        """Pickle a builtin descriptor by (owner class, name, repr)."""
        logger.trace(pickler, "Wr: %s", obj)
        pickler.save_reduce(_getattr, (obj.__objclass__, obj.__name__,
                                       obj.__repr__()), obj=obj)
        logger.trace(pickler, "# Wr")
        return
else:
    @register(MemberDescriptorType)
    @register(GetSetDescriptorType)
    def save_wrapper_descriptor(pickler, obj):
        """Pickle a builtin descriptor by (owner class, name, repr)."""
        logger.trace(pickler, "Wr: %s", obj)
        pickler.save_reduce(_getattr, (obj.__objclass__, obj.__name__,
                                       obj.__repr__()), obj=obj)
        logger.trace(pickler, "# Wr")
        return

1552 

@register(CellType)
def save_cell(pickler, obj):
    """Pickle a closure cell.

    Ce3: empty cell -> create empty and delete cell_contents on load.
    Ce2: contents participate in a reference cycle being saved -> create
         the cell empty and set its contents via deferred postproc.
    Ce1: ordinary cell -> recreate directly with its contents.
    """
    try:
        f = obj.cell_contents
    except ValueError: # cell is empty
        logger.trace(pickler, "Ce3: %s", obj)
        # _shims._CELL_EMPTY is defined in _shims.py to support PyPy 2.7.
        # It unpickles to a sentinel object _dill._CELL_EMPTY, also created in
        # _shims.py. This object is not present in Python 3 because the cell's
        # contents can be deleted in newer versions of Python. The reduce object
        # will instead unpickle to None if unpickled in Python 3.

        # When breaking changes are made to dill, (_shims._CELL_EMPTY,) can
        # be replaced by () OR the delattr function can be removed repending on
        # whichever is more convienient.
        pickler.save_reduce(_create_cell, (_shims._CELL_EMPTY,), obj=obj)
        # Call the function _delattr on the cell's cell_contents attribute
        # The result of this function call will be None
        pickler.save_reduce(_shims._delattr, (obj, 'cell_contents'))
        # pop None created by calling _delattr off stack
        pickler.write(POP)
        logger.trace(pickler, "# Ce3")
        return
    if is_dill(pickler, child=True):
        if id(f) in pickler._postproc:
            # Already seen. Add to its postprocessing.
            postproc = pickler._postproc[id(f)]
        else:
            # Haven't seen it. Add to the highest possible object and set its
            # value as late as possible to prevent cycle.
            postproc = next(iter(pickler._postproc.values()), None)
        if postproc is not None:
            logger.trace(pickler, "Ce2: %s", obj)
            # _CELL_REF is defined in _shims.py to support older versions of
            # dill. When breaking changes are made to dill, (_CELL_REF,) can
            # be replaced by ()
            pickler.save_reduce(_create_cell, (_CELL_REF,), obj=obj)
            # defer filling the cell until the cycle has been fully saved
            postproc.append((_shims._setattr, (obj, 'cell_contents', f)))
            logger.trace(pickler, "# Ce2")
            return
    logger.trace(pickler, "Ce1: %s", obj)
    pickler.save_reduce(_create_cell, (f,), obj=obj)
    logger.trace(pickler, "# Ce1")
    return

1597 

if MAPPING_PROXY_TRICK:
    @register(DictProxyType)
    def save_dictproxy(pickler, obj):
        """Pickle a MappingProxyType, preserving its link to the source dict.

        The __ror__ trick recovers the proxied mapping itself, so the
        unpickled proxy tracks later mutations of the (unpickled) dict.
        """
        logger.trace(pickler, "Mp: %s", _repr_dict(obj)) # obj
        mapping = obj | _dictproxy_helper_instance
        pickler.save_reduce(DictProxyType, (mapping,), obj=obj)
        logger.trace(pickler, "# Mp")
        return
else:
    @register(DictProxyType)
    def save_dictproxy(pickler, obj):
        """Pickle a MappingProxyType over a copy of its contents.

        Without the proxy trick the original mapping is unreachable, so
        the unpickled proxy wraps an independent snapshot.
        """
        logger.trace(pickler, "Mp: %s", _repr_dict(obj)) # obj
        pickler.save_reduce(DictProxyType, (obj.copy(),), obj=obj)
        logger.trace(pickler, "# Mp")
        return

1613 

@register(SliceType)
def save_slice(pickler, obj):
    """Pickle a slice from its (start, stop, step) triple."""
    logger.trace(pickler, "Sl: %s", obj)
    pickler.save_reduce(slice, (obj.start, obj.stop, obj.step), obj=obj)
    logger.trace(pickler, "# Sl")
    return

1620 

@register(XRangeType)
@register(EllipsisType)
@register(NotImplementedType)
def save_singleton(pickler, obj):
    """Pickle repr-roundtrippable singletons by evaluating their repr."""
    logger.trace(pickler, "Si: %s", obj)
    pickler.save_reduce(_eval_repr, (obj.__repr__(),), obj=obj)
    logger.trace(pickler, "# Si")
    return

1629 

def _proxy_helper(obj): # a dead proxy returns a reference to None
    """Get the memory address of a proxy's referent object.

    Returns ``id(None)`` for a dead proxy, and ``id(obj)`` when *obj* is not
    a proxy at all (its str and repr coincide).
    """
    rep = repr(obj)
    try:
        txt = str(obj)
    except ReferenceError:
        # the referent is gone: report the address of None instead
        return id(None)
    if txt == rep:
        # not a proxy (str and repr agree), so the object is its own referent
        return id(obj)
    # a live proxy from here on: parse the hex address out of "<... at 0xADDR>"
    def _parse(text):
        return int(text.rstrip('>').split(' at ')[-1], base=16)
    try:
        return _parse(txt)
    except ValueError:
        # special case: proxy of a 'type'
        if not IS_PYPY:
            return _parse(rep)
        # PyPy: scan live objects for one whose repr matches the proxy's str
        for candidate in gc.get_objects():
            if repr(candidate) == txt:
                return id(candidate)
        # nothing found so throw ReferenceError
        raise ReferenceError(
            "Cannot reference object for proxy at '%s'" % id(obj))

1650 

def _locate_object(address, module=None):
    """get object located at the given memory address (inverse of id(obj))"""
    # check the cheap, common singletons first
    for candidate in (None, True, False): #XXX: more...?
        if id(candidate) == address:
            return candidate
    # search either the given module's namespace or every live object
    pool = module.__dict__.values() if module else gc.get_objects()
    for candidate in pool:
        if id(candidate) == address:
            return candidate
    # nothing found: TypeError for a non-address, ReferenceError otherwise
    try:
        address = hex(address)
    except TypeError:
        raise TypeError("'%s' is not a valid memory address" % str(address))
    raise ReferenceError("Cannot reference object at '%s'" % address)

1666 

@register(ReferenceType)
def save_weakref(pickler, obj):
    """Pickle a weak reference by pickling its referent (None when dead)."""
    target = obj()
    logger.trace(pickler, "R1: %s", obj)
    #refobj = ctypes.pythonapi.PyWeakref_GetObject(obj) # dead returns "None"
    pickler.save_reduce(_create_weakref, (target,), obj=obj)
    logger.trace(pickler, "# R1")

1675 

@register(ProxyType)
@register(CallableProxyType)
def save_weakproxy(pickler, obj):
    """Pickle a weak proxy by locating its referent in memory."""
    # Must do string substitution here and use %r to avoid ReferenceError.
    logger.trace(pickler, "R2: %r" % obj)
    target = _locate_object(_proxy_helper(obj))
    pickler.save_reduce(_create_weakproxy, (target, callable(obj)), obj=obj)
    logger.trace(pickler, "# R2")

1685 

def _is_builtin_module(module):
    """Return True if *module* should always be pickled by reference.

    A module with no ``__file__`` is treated as builtin; one whose
    ``__file__`` is None is not.  Otherwise the module counts as "builtin"
    when its file lives under an interpreter prefix directory, carries an
    extension-module suffix, or sits inside site-packages.
    """
    if not hasattr(module, "__file__"): return True
    if module.__file__ is None: return False
    # If a module file name starts with prefix, it should be a builtin
    # module, so should always be pickled as a reference.
    names = ["base_prefix", "base_exec_prefix", "exec_prefix", "prefix", "real_prefix"]
    rp = os.path.realpath
    # See https://github.com/uqfoundation/dill/issues/566
    # NOTE(review): str.endswith below requires EXTENSION_SUFFIXES to be a
    # tuple -- presumably converted earlier in this file; confirm.
    return (
        any(
            module.__file__.startswith(getattr(sys, name))
            or rp(module.__file__).startswith(rp(getattr(sys, name)))
            for name in names
            if hasattr(sys, name)
        )
        or module.__file__.endswith(EXTENSION_SUFFIXES)
        or 'site-packages' in module.__file__
    )

1704 

def _is_imported_module(module):
    # A module counts as "imported" when it has a loader or is registered in
    # sys.modules; runtime-constructed module objects typically have neither.
    return getattr(module, '__loader__', None) is not None or module in sys.modules.values()

1707 

@register(ModuleType)
def save_module(pickler, obj):
    """Pickle a module object.

    Non-builtin modules (and the session __main__) are saved by content via
    ``_import_module`` plus a state dict (trace tag M1); everything else is
    saved by reference (M2).
    """
    if False: #_use_diff:
        # NOTE(review): dead branch -- the 'diff' feature is disabled here.
        if obj.__name__.split('.', 1)[0] != "dill":
            try:
                changed = diff.whats_changed(obj, seen=pickler._diff_cache)[0]
            except RuntimeError: # not memorised module, probably part of dill
                pass
            else:
                logger.trace(pickler, "M2: %s with diff", obj)
                logger.info("Diff: %s", changed.keys())
                pickler.save_reduce(_import_module, (obj.__name__,), obj=obj,
                                    state=changed)
                logger.trace(pickler, "# M2")
                return

        logger.trace(pickler, "M1: %s", obj)
        pickler.save_reduce(_import_module, (obj.__name__,), obj=obj)
        logger.trace(pickler, "# M1")
    else:
        builtin_mod = _is_builtin_module(obj)
        is_session_main = is_dill(pickler, child=True) and obj is pickler._main
        if (obj.__name__ not in ("builtins", "dill", "dill._dill") and not builtin_mod
                or is_session_main):
            logger.trace(pickler, "M1: %s", obj)
            # Hack for handling module-type objects in load_module().
            mod_name = obj.__name__ if _is_imported_module(obj) else '__runtime__.%s' % obj.__name__
            # Second references are saved as __builtin__.__main__ in save_module_dict().
            main_dict = obj.__dict__.copy()
            # drop entries that can't or shouldn't round-trip
            for item in ('__builtins__', '__loader__'):
                main_dict.pop(item, None)
            for item in IPYTHON_SINGLETONS: #pragma: no cover
                if getattr(main_dict.get(item), '__module__', '').startswith('IPython'):
                    del main_dict[item]
            pickler.save_reduce(_import_module, (mod_name,), obj=obj, state=main_dict)
            logger.trace(pickler, "# M1")
        elif obj.__name__ == "dill._dill":
            # this module itself is always referenced by name
            logger.trace(pickler, "M2: %s", obj)
            pickler.save_global(obj, name="_dill")
            logger.trace(pickler, "# M2")
        else:
            logger.trace(pickler, "M2: %s", obj)
            pickler.save_reduce(_import_module, (obj.__name__,), obj=obj)
            logger.trace(pickler, "# M2")
    return

1753 

# The following function is based on '_extract_class_dict' from 'cloudpickle'
# Copyright (c) 2012, Regents of the University of California.
# Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
# License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE
def _get_typedict_type(cls, clsdict, attrs, postproc_list):
    """Retrieve a copy of the dict of a class without the inherited methods"""
    # Build the effective inherited namespace: a single base's dict, or a
    # merge of several bases (updating in reversed order so earlier bases
    # take precedence, mirroring attribute lookup).
    if len(cls.__bases__) == 1:
        inherited_dict = cls.__bases__[0].__dict__
    else:
        inherited_dict = {}
        for base in reversed(cls.__bases__):
            inherited_dict.update(base.__dict__)
    to_remove = []
    # Drop entries identical to the inherited value; the '__qualname__'
    # check restricts removal to function-like attributes.
    for name, value in dict.items(clsdict):
        try:
            base_value = inherited_dict[name]
            if value is base_value and hasattr(value, '__qualname__'):
                to_remove.append(name)
        except KeyError:
            pass
    for name in to_remove:
        dict.pop(clsdict, name)

    if issubclass(type(cls), type):
        # these are recreated automatically when the class is rebuilt
        clsdict.pop('__dict__', None)
        clsdict.pop('__weakref__', None)
        # clsdict.pop('__prepare__', None)
    return clsdict, attrs

1782 

def _get_typedict_abc(obj, _dict, attrs, postproc_list):
    """Strip ABC bookkeeping attributes from a class dict and schedule
    re-registration of the ABC's virtual subclasses after unpickling.

    Appends ``(obj.register, (subclass,))`` pairs to *postproc_list* and
    removes the ``_abc_*`` cache attributes from *_dict*.  Raises
    PicklingError when no registry can be found.
    """
    if hasattr(abc, '_get_dump'):
        (registry, _, _, _) = abc._get_dump(obj)
        register = obj.register
        # registry holds weak references; call each to obtain the class
        postproc_list.extend((register, (reg(),)) for reg in registry)
    elif hasattr(obj, '_abc_registry'):
        # pre-3.7 ABC implementation exposes the registry directly
        registry = obj._abc_registry
        register = obj.register
        postproc_list.extend((register, (reg,)) for reg in registry)
    else:
        # BUGFIX: interpolate the message; previously the format string and
        # the object were passed as two constructor args and never merged.
        raise PicklingError("Cannot find registry of ABC %s" % (obj,))

    if '_abc_registry' in _dict:
        # old-style (pre-3.7) bookkeeping attributes
        _dict.pop('_abc_registry', None)
        _dict.pop('_abc_cache', None)
        _dict.pop('_abc_negative_cache', None)
        # _dict.pop('_abc_negative_cache_version', None)
    else:
        _dict.pop('_abc_impl', None)
    return _dict, attrs

1803 

@register(TypeType)
def save_type(pickler, obj, postproc_list=None):
    """Pickle a class/type object.

    Strategies (trace tags): T1 known builtin types via _typemap; T6
    namedtuple subclasses; T7 special singleton types (NoneType, EnumMeta,
    etc.); T2 pickle-by-value reconstruction of the class; T4
    pickle-by-reference via save_global.
    """
    if obj in _typemap:
        logger.trace(pickler, "T1: %s", obj)
        # if obj in _incedental_types:
        #     warnings.warn('Type %r may only exist on this implementation of Python and cannot be unpickled in other implementations.' % (obj,), PicklingWarning)
        pickler.save_reduce(_load_type, (_typemap[obj],), obj=obj)
        logger.trace(pickler, "# T1")
    elif obj.__bases__ == (tuple,) and all([hasattr(obj, attr) for attr in ('_fields','_asdict','_make','_replace')]):
        # special case: namedtuples
        logger.trace(pickler, "T6: %s", obj)

        obj_name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
        if obj.__name__ != obj_name:
            # restore the qualified name after the class is rebuilt
            if postproc_list is None:
                postproc_list = []
            postproc_list.append((setattr, (obj, '__qualname__', obj_name)))

        if not obj._field_defaults:
            _save_with_postproc(pickler, (_create_namedtuple, (obj.__name__, obj._fields, obj.__module__)), obj=obj, postproc_list=postproc_list)
        else:
            # keep defaults in field order
            defaults = [obj._field_defaults[field] for field in obj._fields if field in obj._field_defaults]
            _save_with_postproc(pickler, (_create_namedtuple, (obj.__name__, obj._fields, obj.__module__, defaults)), obj=obj, postproc_list=postproc_list)
        logger.trace(pickler, "# T6")
        return

    # special caes: NoneType, NotImplementedType, EllipsisType, EnumMeta, etc
    elif obj is type(None):
        logger.trace(pickler, "T7: %s", obj)
        #XXX: pickler.save_reduce(type, (None,), obj=obj)
        pickler.write(GLOBAL + b'__builtin__\nNoneType\n')
        logger.trace(pickler, "# T7")
    elif obj is NotImplementedType:
        logger.trace(pickler, "T7: %s", obj)
        pickler.save_reduce(type, (NotImplemented,), obj=obj)
        logger.trace(pickler, "# T7")
    elif obj is EllipsisType:
        logger.trace(pickler, "T7: %s", obj)
        pickler.save_reduce(type, (Ellipsis,), obj=obj)
        logger.trace(pickler, "# T7")
    elif obj is EnumMeta:
        logger.trace(pickler, "T7: %s", obj)
        pickler.write(GLOBAL + b'enum\nEnumMeta\n')
        logger.trace(pickler, "# T7")
    elif obj is ExceptHookArgsType: #NOTE: must be after NoneType for pypy
        logger.trace(pickler, "T7: %s", obj)
        pickler.write(GLOBAL + b'threading\nExceptHookArgs\n')
        logger.trace(pickler, "# T7")

    else:
        _byref = getattr(pickler, '_byref', None)
        obj_recursive = id(obj) in getattr(pickler, '_postproc', ())
        incorrectly_named = not _locate_function(obj, pickler)
        if not _byref and not obj_recursive and incorrectly_named: # not a function, but the name was held over
            if postproc_list is None:
                postproc_list = []

            # thanks to Tom Stepleton pointing out pickler._session unneeded
            logger.trace(pickler, "T2: %s", obj)
            _dict, attrs = _get_typedict_type(obj, obj.__dict__.copy(), None, postproc_list) # copy dict proxy to a dict

            #print (_dict)
            #print ("%s\n%s" % (type(obj), obj.__name__))
            #print ("%s\n%s" % (obj.__bases__, obj.__dict__))
            slots = _dict.get('__slots__', ())
            if type(slots) == str:
                # __slots__ accepts a single string
                slots = (slots,)

            # slot names are member descriptors recreated on class creation
            for name in slots:
                _dict.pop(name, None)

            if isinstance(obj, abc.ABCMeta):
                logger.trace(pickler, "ABC: %s", obj)
                _dict, attrs = _get_typedict_abc(obj, _dict, attrs, postproc_list)
                logger.trace(pickler, "# ABC")

            qualname = getattr(obj, '__qualname__', None)
            if attrs is not None:
                for k, v in attrs.items():
                    postproc_list.append((setattr, (obj, k, v)))
                # TODO: Consider using the state argument to save_reduce?
            if qualname is not None:
                postproc_list.append((setattr, (obj, '__qualname__', qualname)))

            if not hasattr(obj, '__orig_bases__'):
                _save_with_postproc(pickler, (_create_type, (
                    type(obj), obj.__name__, obj.__bases__, _dict
                )), obj=obj, postproc_list=postproc_list)
            else:
                # This case will always work, but might be overkill.
                _metadict = {
                    'metaclass': type(obj)
                }

                if _dict:
                    _dict_update = PartialType(_setitems, source=_dict)
                else:
                    _dict_update = None

                _save_with_postproc(pickler, (new_class, (
                    obj.__name__, obj.__orig_bases__, _metadict, _dict_update
                )), obj=obj, postproc_list=postproc_list)
            logger.trace(pickler, "# T2")
        else:
            obj_name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
            logger.trace(pickler, "T4: %s", obj)
            if incorrectly_named:
                warnings.warn(
                    "Cannot locate reference to %r." % (obj,),
                    PicklingWarning,
                    stacklevel=3,
                )
            if obj_recursive:
                warnings.warn(
                    "Cannot pickle %r: %s.%s has recursive self-references that "
                    "trigger a RecursionError." % (obj, obj.__module__, obj_name),
                    PicklingWarning,
                    stacklevel=3,
                )
            #print (obj.__dict__)
            #print ("%s\n%s" % (type(obj), obj.__name__))
            #print ("%s\n%s" % (obj.__bases__, obj.__dict__))
            StockPickler.save_global(pickler, obj, name=obj_name)
            logger.trace(pickler, "# T4")
    return

1930 

@register(property)
@register(abc.abstractproperty)
def save_property(pickler, obj):
    """Reduce a property to its (fget, fset, fdel, doc) constructor call."""
    logger.trace(pickler, "Pr: %s", obj)
    accessors = (obj.fget, obj.fset, obj.fdel, obj.__doc__)
    pickler.save_reduce(type(obj), accessors, obj=obj)
    logger.trace(pickler, "# Pr")

1938 

@register(staticmethod)
@register(classmethod)
@register(abc.abstractstaticmethod)
@register(abc.abstractclassmethod)
def save_classmethod(pickler, obj):
    """Reduce a static/class method wrapper by re-wrapping its function."""
    logger.trace(pickler, "Cm: %s", obj)
    wrapped = obj.__func__
    pickler.save_reduce(type(obj), (wrapped,), obj=obj)
    logger.trace(pickler, "# Cm")

1957 

@register(FunctionType)
def save_function(pickler, obj):
    """Pickle a function.

    By value (trace tag F1) when it cannot be located by name -- rebuilding
    it from code, globals, defaults, and closure via ``_create_function`` --
    otherwise by reference via save_global (F2).  F3 handles PyPy builtin
    functions whose code object is not a CodeType.
    """
    if not _locate_function(obj, pickler):
        if type(obj.__code__) is not CodeType:
            # Some PyPy builtin functions have no module name, and thus are not
            # able to be located
            module_name = getattr(obj, '__module__', None)
            if module_name is None:
                module_name = __builtin__.__name__
            module = _import_module(module_name, safe=True)
            _pypy_builtin = False
            try:
                found, _ = _getattribute(module, obj.__qualname__)
                if getattr(found, '__func__', None) is obj:
                    _pypy_builtin = True
            except AttributeError:
                pass

            if _pypy_builtin:
                logger.trace(pickler, "F3: %s", obj)
                pickler.save_reduce(getattr, (found, '__func__'), obj=obj)
                logger.trace(pickler, "# F3")
                return

        logger.trace(pickler, "F1: %s", obj)
        _recurse = getattr(pickler, '_recurse', None)
        _postproc = getattr(pickler, '_postproc', None)
        _main_modified = getattr(pickler, '_main_modified', None)
        _original_main = getattr(pickler, '_original_main', __builtin__)#'None'
        postproc_list = []
        if _recurse:
            # recurse to get all globals referred to by obj
            from .detect import globalvars
            globs_copy = globalvars(obj, recurse=True, builtin=True)

            # Add the name of the module to the globs dictionary to prevent
            # the duplication of the dictionary. Pickle the unpopulated
            # globals dictionary and set the remaining items after the function
            # is created to correctly handle recursion.
            globs = {'__name__': obj.__module__}
        else:
            globs_copy = obj.__globals__

            # If the globals is the __dict__ from the module being saved as a
            # session, substitute it by the dictionary being actually saved.
            if _main_modified and globs_copy is _original_main.__dict__:
                globs_copy = getattr(pickler, '_main', _original_main).__dict__
                globs = globs_copy
            # If the globals is a module __dict__, do not save it in the pickle.
            elif globs_copy is not None and obj.__module__ is not None and \
                    getattr(_import_module(obj.__module__, True), '__dict__', None) is globs_copy:
                globs = globs_copy
            else:
                globs = {'__name__': obj.__module__}

        if globs_copy is not None and globs is not globs_copy:
            # In the case that the globals are copied, we need to ensure that
            # the globals dictionary is updated when all objects in the
            # dictionary are already created.
            glob_ids = {id(g) for g in globs_copy.values()}
            for stack_element in _postproc:
                if stack_element in glob_ids:
                    _postproc[stack_element].append((_setitems, (globs, globs_copy)))
                    break
            else:
                postproc_list.append((_setitems, (globs, globs_copy)))

        closure = obj.__closure__
        # collect attributes that must be restored on the new function
        state_dict = {}
        for fattrname in ('__doc__', '__kwdefaults__', '__annotations__'):
            fattr = getattr(obj, fattrname, None)
            if fattr is not None:
                state_dict[fattrname] = fattr
        if obj.__qualname__ != obj.__name__:
            state_dict['__qualname__'] = obj.__qualname__
        if '__name__' not in globs or obj.__module__ != globs['__name__']:
            state_dict['__module__'] = obj.__module__

        state = obj.__dict__
        if type(state) is not dict:
            # non-dict __dict__ must be restored via the state dict
            state_dict['__dict__'] = state
            state = None
        if state_dict:
            state = state, state_dict

        _save_with_postproc(pickler, (_create_function, (
            obj.__code__, globs, obj.__name__, obj.__defaults__,
            closure
        ), state), obj=obj, postproc_list=postproc_list)

        # Lift closure cell update to earliest function (#458)
        if _postproc:
            topmost_postproc = next(iter(_postproc.values()), None)
            if closure and topmost_postproc:
                for cell in closure:
                    possible_postproc = (setattr, (cell, 'cell_contents', obj))
                    try:
                        topmost_postproc.remove(possible_postproc)
                    except ValueError:
                        continue

                    # Change the value of the cell
                    pickler.save_reduce(*possible_postproc)
                    # pop None created by calling preprocessing step off stack
                    pickler.write(POP)

        logger.trace(pickler, "# F1")
    else:
        logger.trace(pickler, "F2: %s", obj)
        name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
        StockPickler.save_global(pickler, obj, name=name)
        logger.trace(pickler, "# F2")
    return

2071 

if HAS_CTYPES and hasattr(ctypes, 'pythonapi'):
    # Bind the C-level PyCapsule API through ctypes so capsules can be
    # inspected (pointer/name/context/destructor) and rebuilt.  Only
    # available on CPython builds exposing ctypes.pythonapi.
    _PyCapsule_New = ctypes.pythonapi.PyCapsule_New
    _PyCapsule_New.argtypes = (ctypes.c_void_p, ctypes.c_char_p, ctypes.c_void_p)
    _PyCapsule_New.restype = ctypes.py_object
    _PyCapsule_GetPointer = ctypes.pythonapi.PyCapsule_GetPointer
    _PyCapsule_GetPointer.argtypes = (ctypes.py_object, ctypes.c_char_p)
    _PyCapsule_GetPointer.restype = ctypes.c_void_p
    _PyCapsule_GetDestructor = ctypes.pythonapi.PyCapsule_GetDestructor
    _PyCapsule_GetDestructor.argtypes = (ctypes.py_object,)
    _PyCapsule_GetDestructor.restype = ctypes.c_void_p
    _PyCapsule_GetContext = ctypes.pythonapi.PyCapsule_GetContext
    _PyCapsule_GetContext.argtypes = (ctypes.py_object,)
    _PyCapsule_GetContext.restype = ctypes.c_void_p
    _PyCapsule_GetName = ctypes.pythonapi.PyCapsule_GetName
    _PyCapsule_GetName.argtypes = (ctypes.py_object,)
    _PyCapsule_GetName.restype = ctypes.c_char_p
    _PyCapsule_IsValid = ctypes.pythonapi.PyCapsule_IsValid
    _PyCapsule_IsValid.argtypes = (ctypes.py_object, ctypes.c_char_p)
    _PyCapsule_IsValid.restype = ctypes.c_bool
    _PyCapsule_SetContext = ctypes.pythonapi.PyCapsule_SetContext
    _PyCapsule_SetContext.argtypes = (ctypes.py_object, ctypes.c_void_p)
    _PyCapsule_SetDestructor = ctypes.pythonapi.PyCapsule_SetDestructor
    _PyCapsule_SetDestructor.argtypes = (ctypes.py_object, ctypes.c_void_p)
    _PyCapsule_SetName = ctypes.pythonapi.PyCapsule_SetName
    _PyCapsule_SetName.argtypes = (ctypes.py_object, ctypes.c_char_p)
    _PyCapsule_SetPointer = ctypes.pythonapi.PyCapsule_SetPointer
    _PyCapsule_SetPointer.argtypes = (ctypes.py_object, ctypes.c_void_p)
    #from _socket import CAPI as _testcapsule
    _testcapsule_name = b'dill._dill._testcapsule'
    # create a throwaway capsule just to obtain the PyCapsule type object
    _testcapsule = _PyCapsule_New(
        ctypes.cast(_PyCapsule_New, ctypes.c_void_p),
        ctypes.c_char_p(_testcapsule_name),
        None
    )
    PyCapsuleType = type(_testcapsule)
    @register(PyCapsuleType)
    def save_capsule(pickler, obj):
        """Pickle a PyCapsule by raw pointer/name/context/destructor values.

        NOTE(review): the raw pointers only remain meaningful inside the
        same process; cross-process unpickling of a capsule is inherently
        unsafe (see the suppressed warning below).
        """
        logger.trace(pickler, "Cap: %s", obj)
        name = _PyCapsule_GetName(obj)
        #warnings.warn('Pickling a PyCapsule (%s) does not pickle any C data structures and could cause segmentation faults or other memory errors when unpickling.' % (name,), PicklingWarning)
        pointer = _PyCapsule_GetPointer(obj, name)
        context = _PyCapsule_GetContext(obj)
        destructor = _PyCapsule_GetDestructor(obj)
        pickler.save_reduce(_create_capsule, (pointer, name, context, destructor), obj=obj)
        logger.trace(pickler, "# Cap")
    _incedental_reverse_typemap['PyCapsuleType'] = PyCapsuleType
    _reverse_typemap['PyCapsuleType'] = PyCapsuleType
    _incedental_types.add(PyCapsuleType)
else:
    _testcapsule = None

2122 

@register(ContextType)
def save_context(pickler, obj):
    """Reduce a Context object to rebuilding it from its items."""
    logger.trace(pickler, "Cx: %s", obj)
    items = tuple(obj.items())
    pickler.save_reduce(ContextType, items, obj=obj)
    logger.trace(pickler, "# Cx")

2128 

2129 

#############################
# A quick fix for issue #500
# This should be removed when a better solution is found.
# These sentinel singletons from 'dataclasses' are written as GLOBAL
# opcodes so unpickling yields the very same singleton, not a copy.

if hasattr(dataclasses, "_HAS_DEFAULT_FACTORY_CLASS"):
    @register(dataclasses._HAS_DEFAULT_FACTORY_CLASS)
    def save_dataclasses_HAS_DEFAULT_FACTORY_CLASS(pickler, obj):
        logger.trace(pickler, "DcHDF: %s", obj)
        pickler.write(GLOBAL + b"dataclasses\n_HAS_DEFAULT_FACTORY\n")
        logger.trace(pickler, "# DcHDF")

if hasattr(dataclasses, "MISSING"):
    @register(type(dataclasses.MISSING))
    def save_dataclasses_MISSING_TYPE(pickler, obj):
        logger.trace(pickler, "DcM: %s", obj)
        pickler.write(GLOBAL + b"dataclasses\nMISSING\n")
        logger.trace(pickler, "# DcM")

if hasattr(dataclasses, "KW_ONLY"):
    @register(type(dataclasses.KW_ONLY))
    def save_dataclasses_KW_ONLY_TYPE(pickler, obj):
        logger.trace(pickler, "DcKWO: %s", obj)
        pickler.write(GLOBAL + b"dataclasses\nKW_ONLY\n")
        logger.trace(pickler, "# DcKWO")

if hasattr(dataclasses, "_FIELD_BASE"):
    @register(dataclasses._FIELD_BASE)
    def save_dataclasses_FIELD_BASE(pickler, obj):
        logger.trace(pickler, "DcFB: %s", obj)
        # the sentinel's own name (_FIELD, _FIELD_CLASSVAR, ...) is the
        # attribute to reference in the dataclasses module
        pickler.write(GLOBAL + b"dataclasses\n" + obj.name.encode() + b"\n")
        logger.trace(pickler, "# DcFB")

#############################

2163 

# quick sanity checking
def pickles(obj,exact=False,safe=False,**kwds):
    """
    Quick check if object pickles with dill.

    If *exact=True* then an equality test is done to check if the reconstructed
    object matches the original object.

    If *safe=True* then any exception will raised in copy signal that the
    object is not picklable, otherwise only pickling errors will be trapped.

    Additional keyword arguments are as :func:`dumps` and :func:`loads`.
    """
    if safe: exceptions = (Exception,) # RuntimeError, ValueError
    else:
        exceptions = (TypeError, AssertionError, NotImplementedError, PicklingError, UnpicklingError)
    try:
        # round-trip through dill, then compare against the original
        pik = copy(obj, **kwds)
        #FIXME: should check types match first, then check content if "exact"
        try:
            #FIXME: should be "(pik == obj).all()" for numpy comparison, though that'll fail if shapes differ
            result = bool(pik.all() == obj.all())
        except (AttributeError, TypeError):
            warnings.filterwarnings('ignore') #FIXME: be specific
            result = pik == obj
            # remove the filter just inserted at position 0 to restore state
            if warnings.filters: del warnings.filters[0]
        if hasattr(result, 'toarray'): # for unusual types like sparse matrix
            result = result.toarray().all()
        if result: return True
        if not exact:
            result = type(pik) == type(obj)
            if result: return result
            # class instances might have been dumped with byref=False
            return repr(type(pik)) == repr(type(obj)) #XXX: InstanceType?
        return False
    except exceptions:
        return False

2201 

def check(obj, *args, **kwds):
    """
    Check pickling of an object across another process.

    *python* is the path to the python interpreter (defaults to sys.executable)

    Set *verbose=True* to print the unpickled object in the other process.

    Additional keyword arguments are as :func:`dumps` and :func:`loads`.
    """
    # == undocumented ==
    # python -- the string path or executable name of the selected python
    # verbose -- if True, be verbose about printing warning messages
    # all other args and kwds are passed to dill.dumps #FIXME: ignore on load
    verbose = kwds.pop('verbose', False)
    python = kwds.pop('python', None)
    if python is None:
        import sys
        python = sys.executable
    # type check
    isinstance(python, str)  # NOTE(review): result unused -- has no effect
    import subprocess
    fail = True
    try:
        _obj = dumps(obj, *args, **kwds)
        fail = False
    finally:
        if fail and verbose:
            print("DUMP FAILED")
    #FIXME: fails if python interpreter path contains spaces
    # Use the following instead (which also processes the 'ignore' keyword):
    # ignore = kwds.pop('ignore', None)
    # unpickle = "dill.loads(%s, ignore=%s)"%(repr(_obj), repr(ignore))
    # cmd = [python, "-c", "import dill; print(%s)"%unpickle]
    # msg = "SUCCESS" if not subprocess.call(cmd) else "LOAD FAILED"
    msg = "%s -c import dill; print(dill.loads(%s))" % (python, repr(_obj))
    msg = "SUCCESS" if not subprocess.call(msg.split(None,2)) else "LOAD FAILED"
    if verbose:
        print(msg)
    return

2242 

2243# use to protect against missing attributes 

2244def is_dill(pickler, child=None): 

2245 "check the dill-ness of your pickler" 

2246 if child is False or not hasattr(pickler.__class__, 'mro'): 

2247 return 'dill' in pickler.__module__ 

2248 return Pickler in pickler.__class__.mro() 

2249 

def _extend():
    """extend pickle with all of dill's registered types"""
    # need to have pickle not choke on _main_module? use is_dill(pickler)
    for t, func in Pickler.dispatch.items():
        try:
            StockPickler.dispatch[t] = func
        except Exception: #TypeError, PicklingError, UnpicklingError
            # BUGFIX: the previous code called logger.trace with an undefined
            # name ('pickler'), raising NameError instead of skipping; log
            # directly instead (logger.info is used elsewhere in this file).
            logger.info("skip: %s", t)
    return

2259 

# Remove names only needed at import/registration time from the module
# namespace.
del diff, _use_diff, use_diff

# EOF