Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.10/site-packages/dill/_dill.py: 41%

Shortcuts on this page

r m x   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1392 statements  

1# -*- coding: utf-8 -*- 

2# 

3# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) 

4# Copyright (c) 2008-2015 California Institute of Technology. 

5# Copyright (c) 2016-2025 The Uncertainty Quantification Foundation. 

6# License: 3-clause BSD. The full license text is available at: 

7# - https://github.com/uqfoundation/dill/blob/master/LICENSE 

8""" 

9dill: a utility for serialization of python objects 

10 

11The primary functions in `dill` are :func:`dump` and 

12:func:`dumps` for serialization ("pickling") to a 

13file or to a string, respectively, and :func:`load` 

14and :func:`loads` for deserialization ("unpickling"), 

15similarly, from a file or from a string. Other notable 

16functions are :func:`~dill.dump_module` and 

17:func:`~dill.load_module`, which are used to save and 

restore module objects, including an interpreter session. 

19 

20Based on code written by Oren Tirosh and Armin Ronacher. 

21Extended to a (near) full set of the builtin types (in types module), 

22and coded to the pickle interface, by <mmckerns@caltech.edu>. 

23Initial port to python3 by Jonathan Dobson, continued by mmckerns. 

24Tested against "all" python types (Std. Lib. CH 1-15 @ 2.7) by mmckerns. 

25Tested against CH16+ Std. Lib. ... TBD. 

26""" 

27 

28from __future__ import annotations 

29 

30__all__ = [ 

31 'dump','dumps','load','loads','copy', 

32 'Pickler','Unpickler','register','pickle','pickles','check', 

33 'DEFAULT_PROTOCOL','HIGHEST_PROTOCOL','HANDLE_FMODE','CONTENTS_FMODE','FILE_FMODE', 

34 'PickleError','PickleWarning','PicklingError','PicklingWarning','UnpicklingError', 

35 'UnpicklingWarning', 

36] 

37 

38__module__ = 'dill' 

39 

40import warnings 

41from .logger import adapter as logger 

42from .logger import trace as _trace 

43log = logger # backward compatibility (see issue #582) 

44 

45import os 

46import sys 

47diff = None 

48_use_diff = False 

49OLD38 = (sys.hexversion < 0x3080000) 

50OLD39 = (sys.hexversion < 0x3090000) 

51OLD310 = (sys.hexversion < 0x30a0000) 

52OLD312a7 = (sys.hexversion < 0x30c00a7) 

53#XXX: get types from .objtypes ? 

54import builtins as __builtin__ 

55from pickle import _Pickler as StockPickler, Unpickler as StockUnpickler 

56from pickle import GLOBAL, POP 

57from _thread import LockType 

58from _thread import RLock as RLockType 

59try: 

60 from _thread import _ExceptHookArgs as ExceptHookArgsType 

61except ImportError: 

62 ExceptHookArgsType = None 

63try: 

64 from _thread import _ThreadHandle as ThreadHandleType 

65except ImportError: 

66 ThreadHandleType = None 

67#from io import IOBase 

68from types import CodeType, FunctionType, MethodType, GeneratorType, \ 

69 TracebackType, FrameType, ModuleType, BuiltinMethodType 

70BufferType = memoryview #XXX: unregistered 

71ClassType = type # no 'old-style' classes 

72EllipsisType = type(Ellipsis) 

73#FileType = IOBase 

74NotImplementedType = type(NotImplemented) 

75SliceType = slice 

76TypeType = type # 'new-style' classes #XXX: unregistered 

77XRangeType = range 

78from types import MappingProxyType as DictProxyType, new_class 

79from pickle import DEFAULT_PROTOCOL, HIGHEST_PROTOCOL, PickleError, PicklingError, UnpicklingError 

80import __main__ as _main_module 

81import marshal 

82import gc 

83# import zlib 

84import abc 

85import dataclasses 

86from weakref import ReferenceType, ProxyType, CallableProxyType 

87from collections import OrderedDict 

88from enum import Enum, EnumMeta 

89from functools import partial 

90from operator import itemgetter, attrgetter 

91GENERATOR_FAIL = False 

92import importlib.machinery 

93EXTENSION_SUFFIXES = tuple(importlib.machinery.EXTENSION_SUFFIXES) 

94try: 

95 import ctypes 

96 HAS_CTYPES = True 

97 # if using `pypy`, pythonapi is not found 

98 IS_PYPY = not hasattr(ctypes, 'pythonapi') 

99except ImportError: 

100 HAS_CTYPES = False 

101 IS_PYPY = False 

102NumpyUfuncType = None 

103NumpyDType = None 

104NumpyArrayType = None 

105try: 

106 if not importlib.machinery.PathFinder().find_spec('numpy'): 

107 raise ImportError("No module named 'numpy'") 

108 NumpyUfuncType = True 

109 NumpyDType = True 

110 NumpyArrayType = True 

111except ImportError: 

112 pass 

113def __hook__(): 

114 global NumpyArrayType, NumpyDType, NumpyUfuncType 

115 from numpy import ufunc as NumpyUfuncType 

116 from numpy import ndarray as NumpyArrayType 

117 from numpy import dtype as NumpyDType 

118 return True 

if NumpyArrayType: # then has numpy
    def ndarraysubclassinstance(obj_type):
        # True only for ndarray (sub)classes that still use numpy's own
        # reduce protocol; classes with custom reducers pickle normally.
        if all((c.__module__, c.__name__) != ('numpy', 'ndarray') for c in obj_type.__mro__):
            return False
        # anything below here is a numpy array (or subclass) instance
        __hook__() # import numpy (so the following works!!!)
        # verify that __reduce__ has not been overridden
        if obj_type.__reduce_ex__ is not NumpyArrayType.__reduce_ex__ \
                or obj_type.__reduce__ is not NumpyArrayType.__reduce__:
            return False
        return True
    def numpyufunc(obj_type):
        # a type is a ufunc if numpy.ufunc appears anywhere in its MRO
        return any((c.__module__, c.__name__) == ('numpy', 'ufunc') for c in obj_type.__mro__)
    def numpydtype(obj_type):
        # a type is a numpy dtype if numpy.dtype appears in its MRO
        if all((c.__module__, c.__name__) != ('numpy', 'dtype') for c in obj_type.__mro__):
            return False
        # anything below here is a numpy dtype
        __hook__() # import numpy (so the following works!!!)
        return obj_type is type(NumpyDType) # handles subclasses
else:
    # numpy is not installed: all predicates are trivially False
    def ndarraysubclassinstance(obj): return False
    def numpyufunc(obj): return False
    def numpydtype(obj): return False

142 

143from types import GetSetDescriptorType, ClassMethodDescriptorType, \ 

144 WrapperDescriptorType, MethodDescriptorType, MemberDescriptorType, \ 

145 MethodWrapperType #XXX: unused 

146 

147# make sure to add these 'hand-built' types to _typemap 

148CellType = type((lambda x: lambda y: x)(0).__closure__[0]) 

149PartialType = type(partial(int, base=2)) 

150SuperType = type(super(Exception, TypeError())) 

151ItemGetterType = type(itemgetter(0)) 

152AttrGetterType = type(attrgetter('__repr__')) 

153 

154try: 

155 from functools import _lru_cache_wrapper as LRUCacheType 

156except ImportError: 

157 LRUCacheType = None 

158 

159if not isinstance(LRUCacheType, type): 

160 LRUCacheType = None 

161 

def get_file_type(*args, **kwargs):
    """Probe the concrete io type produced by opening os.devnull.

    Extra positional/keyword arguments are forwarded to the opener; a
    custom *open* callable may be supplied by keyword (defaults to the
    builtin open). The probe handle is closed before returning its type.
    """
    opener = kwargs.pop("open", __builtin__.open)
    handle = opener(os.devnull, *args, **kwargs)
    try:
        return type(handle)
    finally:
        handle.close()

168 

IS_PYODIDE = sys.platform == 'emscripten'

# Concrete io types, discovered by actually opening os.devnull in each mode.
# Pyodide cannot open files for update, hence the BufferedRandomType guard.
FileType = get_file_type('rb', buffering=0)
TextWrapperType = get_file_type('r', buffering=-1)
BufferedRandomType = None if IS_PYODIDE else get_file_type('r+b', buffering=-1)
BufferedReaderType = get_file_type('rb', buffering=-1)
BufferedWriterType = get_file_type('wb', buffering=-1)
try:
    # pure-python io implementations (from _pyio), probed the same way
    from _pyio import open as _open
    PyTextWrapperType = get_file_type('r', buffering=-1, open=_open)
    PyBufferedRandomType = None if IS_PYODIDE else get_file_type('r+b', buffering=-1, open=_open)
    PyBufferedReaderType = get_file_type('rb', buffering=-1, open=_open)
    PyBufferedWriterType = get_file_type('wb', buffering=-1, open=_open)
except ImportError:
    PyTextWrapperType = PyBufferedRandomType = PyBufferedReaderType = PyBufferedWriterType = None

184from io import BytesIO as StringIO 

185InputType = OutputType = None 

186from socket import socket as SocketType 

187#FIXME: additionally calls ForkingPickler.register several times 

188from multiprocessing.reduction import _reduce_socket as reduce_socket 

189try: #pragma: no cover 

190 IS_IPYTHON = __IPYTHON__ # is True 

191 ExitType = None # IPython.core.autocall.ExitAutocall 

192 IPYTHON_SINGLETONS = ('exit', 'quit', 'get_ipython') 

193except NameError: 

194 IS_IPYTHON = False 

195 try: ExitType = type(exit) # apparently 'exit' can be removed 

196 except NameError: ExitType = None 

197 IPYTHON_SINGLETONS = () 

198 

199import inspect 

200import typing 

201 

202 

203### Shims for different versions of Python and dill 

class Sentinel(object):
    """
    Create a unique sentinel object that is pickled as a constant.
    """
    def __init__(self, name, module_name=None):
        self.name = name
        if module_name is not None:
            self.__module__ = module_name # pragma: no cover
        else:
            # attribute the sentinel to the module that created it
            caller = inspect.currentframe().f_back
            self.__module__ = caller.f_globals['__name__']
    def __repr__(self):
        return self.__module__ + '.' + self.name # pragma: no cover
    def __copy__(self):
        # sentinels are singletons: copying yields the same object
        return self # pragma: no cover
    def __deepcopy__(self, memo):
        return self # pragma: no cover
    def __reduce__(self):
        # pickle as a bare global name, i.e. a constant
        return self.name
    def __reduce_ex__(self, protocol):
        return self.name

225 

226from . import _shims 

227from ._shims import Reduce, Getattr 

228 

229### File modes 

230#: Pickles the file handle, preserving mode. The position of the unpickled 

231#: object is as for a new file handle. 

232HANDLE_FMODE = 0 

233#: Pickles the file contents, creating a new file if on load the file does 

234#: not exist. The position = min(pickled position, EOF) and mode is chosen 

235#: as such that "best" preserves behavior of the original file. 

236CONTENTS_FMODE = 1 

237#: Pickles the entire file (handle and contents), preserving mode and position. 

238FILE_FMODE = 2 

239 

240### Shorthands (modified from python2.5/lib/pickle.py) 

def copy(obj, *args, **kwds):
    """
    Use pickling to 'copy' an object (i.e. `loads(dumps(obj))`).

    See :func:`dumps` and :func:`loads` for keyword arguments.
    """
    # honor an explicit 'ignore', falling back to the Unpickler default
    if 'ignore' in kwds:
        ignore = kwds.pop('ignore')
    else:
        ignore = Unpickler.settings['ignore']
    pickled = dumps(obj, *args, **kwds)
    return loads(pickled, ignore=ignore)

249 

def dump(obj, file, protocol=None, byref=None, fmode=None, recurse=None, **kwds):#, strictio=None):
    """
    Pickle an object to a file.

    See :func:`dumps` for keyword arguments.
    """
    from .settings import settings
    # None selects the configured default protocol; otherwise coerce to int
    if protocol is None:
        protocol = settings['protocol']
    else:
        protocol = int(protocol)
    _kwds = dict(kwds, byref=byref, fmode=fmode, recurse=recurse)
    Pickler(file, protocol, **_kwds).dump(obj)
    return

262 

def dumps(obj, protocol=None, byref=None, fmode=None, recurse=None, **kwds):#, strictio=None):
    """
    Pickle an object to a string.

    *protocol* is the pickler protocol, as defined for Python *pickle*.

    If *byref=True*, then dill behaves a lot more like pickle as certain
    objects (like modules) are pickled by reference as opposed to attempting
    to pickle the object itself.

    If *recurse=True*, then objects referred to in the global dictionary
    are recursively traced and pickled, instead of the default behavior
    of attempting to store the entire global dictionary. This is needed for
    functions defined via *exec()*.

    *fmode* (:const:`HANDLE_FMODE`, :const:`CONTENTS_FMODE`,
    or :const:`FILE_FMODE`) indicates how file handles will be pickled.
    For example, when pickling a data file handle for transfer to a remote
    compute service, *FILE_FMODE* will include the file contents in the
    pickle and cursor position so that a remote method can operate
    transparently on an object with an open file handle.

    Default values for keyword arguments can be set in :mod:`dill.settings`.
    """
    # pickle into an in-memory buffer and hand back the raw bytes
    buffer = StringIO()
    dump(obj, buffer, protocol, byref, fmode, recurse, **kwds)#, strictio)
    return buffer.getvalue()

290 

def load(file, ignore=None, **kwds):
    """
    Unpickle an object from a file.

    See :func:`loads` for keyword arguments.
    """
    unpickler = Unpickler(file, ignore=ignore, **kwds)
    return unpickler.load()

298 

def loads(str, ignore=None, **kwds):
    """
    Unpickle an object from a string.

    If *ignore=False* then objects whose class is defined in the module
    *__main__* are updated to reference the existing class in *__main__*,
    otherwise they are left to refer to the reconstructed type, which may
    be different.

    Default values for keyword arguments can be set in :mod:`dill.settings`.
    """
    # wrap the byte string in a buffer and delegate to load()
    buffer = StringIO(str)
    return load(buffer, ignore, **kwds)

312 

313# def dumpzs(obj, protocol=None): 

314# """pickle an object to a compressed string""" 

315# return zlib.compress(dumps(obj, protocol)) 

316 

317# def loadzs(str): 

318# """unpickle an object from a compressed string""" 

319# return loads(zlib.decompress(str)) 

320 

321### End: Shorthands ### 

322 

class MetaCatchingDict(dict):
    """A dispatch dict that routes unregistered metaclasses to save_type.

    Lookups of missing keys that are themselves metaclasses (subclasses of
    ``type``) fall back to ``save_type``; any other missing key raises
    KeyError as usual.
    """
    def get(self, key, default=None):
        # unlike dict.get, this goes through __getitem__ so that the
        # __missing__ fallback below also applies to get()
        try:
            return self[key]
        except KeyError:
            return default

    def __missing__(self, key):
        if not issubclass(key, type):
            raise KeyError()
        return save_type

335 

# base warning for dill: doubles as a PickleError so it can be raised or warned
class PickleWarning(Warning, PickleError):
    pass

338 

# warning variant of PicklingError (save-side problems)
class PicklingWarning(PickleWarning, PicklingError):
    pass

341 

# warning variant of UnpicklingError (load-side problems)
class UnpicklingWarning(PickleWarning, UnpicklingError):
    pass

344 

345### Extend the Picklers 

class Pickler(StockPickler):
    """python's Pickler extended to interpreter sessions"""
    # Dispatch table mapping types to serializer functions; MetaCatchingDict
    # falls back to save_type for unregistered metaclasses.
    dispatch: typing.Dict[type, typing.Callable[[Pickler, typing.Any], None]] \
        = MetaCatchingDict(StockPickler.dispatch.copy())
    """The dispatch table, a dictionary of serializing functions used
    by Pickler to save objects of specific types. Use :func:`pickle`
    or :func:`register` to associate types to custom functions.

    :meta hide-value:
    """
    _session = False  # flag used when pickling an interpreter session
    from .settings import settings

    def __init__(self, file, *args, **kwds):
        # pop dill-specific options before delegating to the stock Pickler;
        # each falls back to the module-level settings default when None
        settings = Pickler.settings
        _byref = kwds.pop('byref', None)
        #_strictio = kwds.pop('strictio', None)
        _fmode = kwds.pop('fmode', None)
        _recurse = kwds.pop('recurse', None)
        StockPickler.__init__(self, file, *args, **kwds)
        self._main = _main_module
        self._diff_cache = {}
        self._byref = settings['byref'] if _byref is None else _byref
        self._strictio = False #_strictio
        self._fmode = settings['fmode'] if _fmode is None else _fmode
        self._recurse = settings['recurse'] if _recurse is None else _recurse
        self._postproc = OrderedDict()
        self._file = file

    def save(self, obj, save_persistent_id=True):
        # numpy hack: register reducers for numpy types lazily, on the first
        # attempt to pickle such an object, so numpy is never imported eagerly
        obj_type = type(obj)
        if NumpyArrayType and not (obj_type is type or obj_type in Pickler.dispatch):
            # register if the object is a numpy ufunc
            # thanks to Paul Kienzle for pointing out ufuncs didn't pickle
            if numpyufunc(obj_type):
                @register(obj_type)
                def save_numpy_ufunc(pickler, obj):
                    logger.trace(pickler, "Nu: %s", obj)
                    name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
                    StockPickler.save_global(pickler, obj, name=name)
                    logger.trace(pickler, "# Nu")
                    return
                # NOTE: the above 'save' performs like:
                #   import copy_reg
                #   def udump(f): return f.__name__
                #   def uload(name): return getattr(numpy, name)
                #   copy_reg.pickle(NumpyUfuncType, udump, uload)
            # register if the object is a numpy dtype
            if numpydtype(obj_type):
                @register(obj_type)
                def save_numpy_dtype(pickler, obj):
                    logger.trace(pickler, "Dt: %s", obj)
                    pickler.save_reduce(_create_dtypemeta, (obj.type,), obj=obj)
                    logger.trace(pickler, "# Dt")
                    return
                # NOTE: the above 'save' performs like:
                #   import copy_reg
                #   def uload(name): return type(NumpyDType(name))
                #   def udump(f): return uload, (f.type,)
                #   copy_reg.pickle(NumpyDTypeType, udump, uload)
            # register if the object is a subclassed numpy array instance
            if ndarraysubclassinstance(obj_type):
                @register(obj_type)
                def save_numpy_array(pickler, obj):
                    logger.trace(pickler, "Nu: (%s, %s)", obj.shape, obj.dtype)
                    npdict = getattr(obj, '__dict__', None)
                    f, args, state = obj.__reduce__()
                    pickler.save_reduce(_create_array, (f,args,state,npdict), obj=obj)
                    logger.trace(pickler, "# Nu")
                    return
        # end numpy hack

        if GENERATOR_FAIL and obj_type is GeneratorType:
            # generators cannot be pickled; fail loudly when configured to
            msg = "Can't pickle %s: attribute lookup builtins.generator failed" % GeneratorType
            raise PicklingError(msg)
        StockPickler.save(self, obj, save_persistent_id)

    save.__doc__ = StockPickler.save.__doc__

    def dump(self, obj): #NOTE: if settings change, need to update attributes
        # enable trace logging for this dump before delegating
        logger.trace_setup(self)
        StockPickler.dump(self, obj)
    dump.__doc__ = StockPickler.dump.__doc__

430 

class Unpickler(StockUnpickler):
    """python's Unpickler extended to interpreter sessions and more types"""
    from .settings import settings
    _session = False  # flag used when restoring an interpreter session

    def find_class(self, module, name):
        # resolve a global, remapping dill's special/legacy names first
        if (module, name) == ('__builtin__', '__main__'):
            return self._main.__dict__ #XXX: above set w/save_module_dict
        elif (module, name) == ('__builtin__', 'NoneType'):
            return type(None) #XXX: special case: NoneType missing
        # pickles written by old dill used the module name 'dill.dill'
        if module == 'dill.dill': module = 'dill._dill'
        return StockUnpickler.find_class(self, module, name)

    def __init__(self, *args, **kwds):
        # pop the dill-specific 'ignore' option before delegating;
        # falls back to the module-level settings default when None
        settings = Pickler.settings
        _ignore = kwds.pop('ignore', None)
        StockUnpickler.__init__(self, *args, **kwds)
        self._main = _main_module
        self._ignore = settings['ignore'] if _ignore is None else _ignore

    def load(self): #NOTE: if settings change, need to update attributes
        obj = StockUnpickler.load(self)
        # if the object's class came from __main__, optionally rebind it to
        # the class currently defined there (skipped when 'ignore' is set)
        if type(obj).__module__ == getattr(_main_module, '__name__', '__main__'):
            if not self._ignore:
                # point obj class to main
                try: obj.__class__ = getattr(self._main, type(obj).__name__)
                except (AttributeError,TypeError): pass # defined in a file
        #_main_module.__dict__.update(obj.__dict__) #XXX: should update globals ?
        return obj
    load.__doc__ = StockUnpickler.load.__doc__
    pass

462 

463''' 

464def dispatch_table(): 

465 """get the dispatch table of registered types""" 

466 return Pickler.dispatch 

467''' 

468 

469pickle_dispatch_copy = StockPickler.dispatch.copy() 

470 

def pickle(t, func):
    """expose :attr:`~Pickler.dispatch` table for user-created extensions"""
    # associate type *t* with serializer *func* in dill's dispatch table
    Pickler.dispatch[t] = func

475 

def register(t):
    """decorator to register types to Pickler's :attr:`~Pickler.dispatch` table"""
    def _bind(func):
        # record the serializer and hand the function back unchanged
        Pickler.dispatch[t] = func
        return func
    return _bind

482 

def _revert_extension():
    """Drop dill-registered types from pickle's dispatch table.

    Removes every entry that dill added to the stock Pickler's dispatch and
    restores the original stock handler when one had been overridden.
    """
    # NOTE: renamed the loop variable from 'type' so the builtin isn't shadowed
    for t, func in list(StockPickler.dispatch.items()):
        if func.__module__ == __name__:
            del StockPickler.dispatch[t]
            # restore the stock entry that dill had replaced, if any
            if t in pickle_dispatch_copy:
                StockPickler.dispatch[t] = pickle_dispatch_copy[t]

490 

def use_diff(on=True):
    """
    Reduces size of pickles by only including object which have changed.

    Decreases pickle size but increases CPU time needed.
    Also helps avoid some unpickleable objects.
    MUST be called at start of script, otherwise changes will not be recorded.
    """
    global _use_diff, diff
    _use_diff = on
    # load the diff machinery lazily, only the first time it is enabled
    if not (_use_diff and diff is None):
        return
    try:
        from . import diff as _diff_module
    except ImportError:
        import diff as _diff_module
    diff = _diff_module

507 

508def _create_typemap(): 

509 import types 

510 d = dict(list(__builtin__.__dict__.items()) + \ 

511 list(types.__dict__.items())).items() 

512 for key, value in d: 

513 if getattr(value, '__module__', None) == 'builtins' \ 

514 and type(value) is type: 

515 yield key, value 

516 return 

# name -> type map used by _load_type; seeded with the builtins, then
# extended with the 'hand-built' types defined near the top of this module
_reverse_typemap = dict(_create_typemap())
_reverse_typemap.update({
    'PartialType': PartialType,
    'SuperType': SuperType,
    'ItemGetterType': ItemGetterType,
    'AttrGetterType': AttrGetterType,
})
if sys.hexversion < 0x30800a2:
    # types.CellType only exists from 3.8.0a2 on; register our hand-built one
    _reverse_typemap.update({
        'CellType': CellType,
    })

528 

529# "Incidental" implementation specific types. Unpickling these types in another 

530# implementation of Python (PyPy -> CPython) is not guaranteed to work 

531 

532# This dictionary should contain all types that appear in Python implementations 

533# but are not defined in https://docs.python.org/3/library/types.html#standard-interpreter-types 

x=OrderedDict()  # throwaway instance, only needed to capture the odict view types
_incedental_reverse_typemap = {
    'FileType': FileType,
    'BufferedRandomType': BufferedRandomType,
    'BufferedReaderType': BufferedReaderType,
    'BufferedWriterType': BufferedWriterType,
    'TextWrapperType': TextWrapperType,
    'PyBufferedRandomType': PyBufferedRandomType,
    'PyBufferedReaderType': PyBufferedReaderType,
    'PyBufferedWriterType': PyBufferedWriterType,
    'PyTextWrapperType': PyTextWrapperType,
}

# dict/odict view types, obtained from live instances
_incedental_reverse_typemap.update({
    "DictKeysType": type({}.keys()),
    "DictValuesType": type({}.values()),
    "DictItemsType": type({}.items()),

    "OdictKeysType": type(x.keys()),
    "OdictValuesType": type(x.values()),
    "OdictItemsType": type(x.items()),
})

# interpreter-dependent singletons (IPython/REPL exit, StringIO aliases)
if ExitType:
    _incedental_reverse_typemap['ExitType'] = ExitType
if InputType:
    _incedental_reverse_typemap['InputType'] = InputType
    _incedental_reverse_typemap['OutputType'] = OutputType

'''
try:
    import symtable
    _incedental_reverse_typemap["SymtableEntryType"] = type(symtable.symtable("", "string", "exec")._table)
except: #FIXME: fails to pickle
    pass

if sys.hexversion >= 0x30a00a0:
    _incedental_reverse_typemap['LineIteratorType'] = type(compile('3', '', 'eval').co_lines())
'''

if sys.hexversion >= 0x30b00b0 and not IS_PYPY:
    from types import GenericAlias
    _incedental_reverse_typemap["GenericAliasIteratorType"] = type(iter(GenericAlias(list, (int,))))
    '''
    _incedental_reverse_typemap['PositionsIteratorType'] = type(compile('3', '', 'eval').co_positions())
    '''

try:
    # Windows-only registry handle type
    import winreg
    _incedental_reverse_typemap["HKEYType"] = winreg.HKEYType
except ImportError:
    pass

_reverse_typemap.update(_incedental_reverse_typemap)
_incedental_types = set(_incedental_reverse_typemap.values())

del x  # the temporary OrderedDict is no longer needed

# forward map: type -> dill name (inverse of _reverse_typemap)
_typemap = dict((v, k) for k, v in _reverse_typemap.items())

593 

594def _unmarshal(string): 

595 return marshal.loads(string) 

596 

def _load_type(name):
    # resolve a dill type name back to the type object (see _reverse_typemap)
    return _reverse_typemap[name]

599 

600def _create_type(typeobj, *args): 

601 return typeobj(*args) 

602 

603def _create_function(fcode, fglobals, fname=None, fdefaults=None, 

604 fclosure=None, fdict=None, fkwdefaults=None): 

605 # same as FunctionType, but enable passing __dict__ to new function, 

606 # __dict__ is the storehouse for attributes added after function creation 

607 func = FunctionType(fcode, fglobals or dict(), fname, fdefaults, fclosure) 

608 if fdict is not None: 

609 func.__dict__.update(fdict) #XXX: better copy? option to copy? 

610 if fkwdefaults is not None: 

611 func.__kwdefaults__ = fkwdefaults 

612 # 'recurse' only stores referenced modules/objects in fglobals, 

613 # thus we need to make sure that we have __builtins__ as well 

614 if "__builtins__" not in func.__globals__: 

615 func.__globals__["__builtins__"] = globals()["__builtins__"] 

616 # assert id(fglobals) == id(func.__globals__) 

617 return func 

618 

class match:
    """
    Make available a limited structural pattern matching-like syntax for Python < 3.10

    Patterns can be only tuples (without types) currently.
    Inspired by the package pattern-matching-PEP634.

    Usage:
    >>> with match(args) as m:
    >>>     if m.case(('x', 'y')):
    >>>         # use m.x and m.y
    >>>     elif m.case(('x', 'y', 'z')):
    >>>         # use m.x, m.y and m.z

    Equivalent native code for Python >= 3.10:
    >>> match args:
    >>>     case (x, y):
    >>>         # use x and y
    >>>     case (x, y, z):
    >>>         # use x, y and z
    """
    def __init__(self, value):
        self.value = value
        self._fields = None

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        # never suppress exceptions
        return False

    def case(self, args): # *args, **kwargs):
        """just handles tuple patterns"""
        # a pattern matches iff it has the same arity as the value
        if len(args) != len(self.value):
            return False
        self.args = args
        return True

    @property
    def fields(self):
        # bind names to values lazily, on first attribute access
        if self._fields is None:
            self._fields = dict(zip(self.args, self.value))
        return self._fields

    def __getattr__(self, item):
        # expose matched names (m.x, m.y, ...) as attributes
        return self.fields[item]

663 

# Table of (version, distinguishing CodeType attribute, CodeType parameter
# names), newest first, used to detect the running interpreter's code-object
# constructor signature by probing which attribute exists on CodeType.
ALL_CODE_PARAMS = [
    # Version New attribute CodeType parameters
    ((3,11,'a'), 'co_endlinetable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable endlinetable columntable exceptiontable freevars cellvars'),
    ((3,11), 'co_exceptiontable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable exceptiontable freevars cellvars'),
    ((3,11,'p'), 'co_qualname', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable freevars cellvars'),
    ((3,10), 'co_linetable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno linetable freevars cellvars'),
    ((3,8), 'co_posonlyargcount', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno lnotab freevars cellvars'),
    ((3,7), 'co_kwonlyargcount', 'argcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno lnotab freevars cellvars'),
    ]
# the first matching entry describes this interpreter's CodeType signature
for version, new_attr, params in ALL_CODE_PARAMS:
    if hasattr(CodeType, new_attr):
        CODE_VERSION = version
        CODE_PARAMS = params.split()
        break
# parameters whose values may need str -> bytes encoding on reconstruction
ENCODE_PARAMS = set(CODE_PARAMS).intersection(
    ['code', 'lnotab', 'linetable', 'endlinetable', 'columntable', 'exceptiontable'])

680 

def _create_code(*args):
    """Reconstruct a CodeType from pickled arguments.

    The first argument may be a stored co_lnotab (bytes) from newer pickles;
    the remaining args are matched structurally against the known CodeType
    layouts. If the layout matches the running interpreter, CodeType is
    called directly; otherwise the fields are adapted (defaults filled in,
    strings encoded) to this interpreter's signature.
    """
    if not isinstance(args[0], int): # co_lnotab stored from >= 3.10
        LNOTAB, *args = args
    else: # from < 3.10 (or pre-LNOTAB storage)
        LNOTAB = b''

    with match(args) as m:
        # Python 3.11/3.12a (18 members)
        if m.case((
            'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname', 'firstlineno', # args[6:14]
            'linetable', 'exceptiontable', 'freevars', 'cellvars' # args[14:]
        )):
            if CODE_VERSION == (3,11):
                return CodeType(
                    *args[:6],
                    args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
                    *args[7:14],
                    args[14].encode() if hasattr(args[14], 'encode') else args[14], # linetable
                    args[15].encode() if hasattr(args[15], 'encode') else args[15], # exceptiontable
                    args[16],
                    args[17],
                )
            fields = m.fields
        # PyPy 3.11 7.3.19+ (17 members)
        elif m.case((
            'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname', # args[6:13]
            'firstlineno', 'linetable', 'freevars', 'cellvars' # args[13:]
        )):
            if CODE_VERSION == (3,11,'p'):
                return CodeType(
                    *args[:6],
                    args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
                    *args[7:14],
                    args[14].encode() if hasattr(args[14], 'encode') else args[14], # linetable
                    args[15],
                    args[16],
                )
            fields = m.fields
        # Python 3.10 or 3.8/3.9 (16 members)
        elif m.case((
            'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'firstlineno', # args[6:13]
            'LNOTAB_OR_LINETABLE', 'freevars', 'cellvars' # args[13:]
        )):
            if CODE_VERSION == (3,10) or CODE_VERSION == (3,8):
                return CodeType(
                    *args[:6],
                    args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
                    *args[7:13],
                    args[13].encode() if hasattr(args[13], 'encode') else args[13], # lnotab/linetable
                    args[14],
                    args[15],
                )
            fields = m.fields
            # the 14th member is lnotab on <= 3.9 but linetable on >= 3.10
            if CODE_VERSION >= (3,10):
                fields['linetable'] = m.LNOTAB_OR_LINETABLE
            else:
                fields['lnotab'] = LNOTAB if LNOTAB else m.LNOTAB_OR_LINETABLE
        # Python 3.7 (15 args)
        elif m.case((
            'argcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:5]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'firstlineno', # args[5:12]
            'lnotab', 'freevars', 'cellvars' # args[12:]
        )):
            if CODE_VERSION == (3,7):
                return CodeType(
                    *args[:5],
                    args[5].encode() if hasattr(args[5], 'encode') else args[5], # code
                    *args[6:12],
                    args[12].encode() if hasattr(args[12], 'encode') else args[12], # lnotab
                    args[13],
                    args[14],
                )
            fields = m.fields
        # Python 3.11a (20 members)
        elif m.case((
            'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname', 'firstlineno', # args[6:14]
            'linetable', 'endlinetable', 'columntable', 'exceptiontable', 'freevars', 'cellvars' # args[14:]
        )):
            if CODE_VERSION == (3,11,'a'):
                return CodeType(
                    *args[:6],
                    args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
                    *args[7:14],
                    *(a.encode() if hasattr(a, 'encode') else a for a in args[14:18]), # linetable-exceptiontable
                    args[18],
                    args[19],
                )
            fields = m.fields
        else:
            raise UnpicklingError("pattern match for code object failed")

    # The args format doesn't match this version: fill in the fields this
    # interpreter's signature needs but the pickled layout lacked.
    fields.setdefault('posonlyargcount', 0) # from python <= 3.7
    fields.setdefault('lnotab', LNOTAB) # from python >= 3.10
    fields.setdefault('linetable', b'') # from python <= 3.9
    fields.setdefault('qualname', fields['name']) # from python <= 3.10
    fields.setdefault('exceptiontable', b'') # from python <= 3.10
    fields.setdefault('endlinetable', None) # from python != 3.11a
    fields.setdefault('columntable', None) # from python != 3.11a

    # assemble arguments in this interpreter's order, encoding where needed
    args = (fields[k].encode() if k in ENCODE_PARAMS and hasattr(fields[k], 'encode') else fields[k]
            for k in CODE_PARAMS)
    return CodeType(*args)

788 

789def _create_ftype(ftypeobj, func, args, kwds): 

790 if kwds is None: 

791 kwds = {} 

792 if args is None: 

793 args = () 

794 return ftypeobj(func, *args, **kwds) 

795 

796def _create_typing_tuple(argz, *args): #NOTE: workaround python/cpython#94245 

797 if not argz: 

798 return typing.Tuple[()].copy_with(()) 

799 if argz == ((),): 

800 return typing.Tuple[()] 

801 return typing.Tuple[argz] 

802 

if ThreadHandleType:
    def _create_thread_handle(ident, done, *args): #XXX: ignores 'blocking'
        """Recreate a thread handle for thread *ident*, marking it done if
        the original thread had already finished.

        NOTE(review): relies on the private threading._make_thread_handle
        API, which is only present when ThreadHandleType is available.
        """
        from threading import _make_thread_handle
        handle = _make_thread_handle(ident)
        if done:
            handle._set_done()
        return handle

810 

811def _create_lock(locked, *args): #XXX: ignores 'blocking' 

812 from threading import Lock 

813 lock = Lock() 

814 if locked: 

815 if not lock.acquire(False): 

816 raise UnpicklingError("Cannot acquire lock") 

817 return lock 

818 

def _create_rlock(count, owner, *args): #XXX: ignores 'blocking'
    """Recreate an RLock, restoring its recursion count and owning thread id."""
    rlock = RLockType()
    if owner is not None:
        rlock._acquire_restore((count, owner))
    if owner and not rlock._is_owned():
        # restored owner does not match the current thread
        raise UnpicklingError("Cannot acquire lock")
    return rlock

826 

# thanks to matsjoyce for adding all the different file modes
def _create_filehandle(name, mode, position, closed, open, strictio, fmode, fdata): # buffering=0
    """Recreate a file handle from its pickled metadata.

    Only the handle is pickled, not necessarily the contents.  *fmode*
    selects the strategy: HANDLE_FMODE reattaches by name, CONTENTS_FMODE
    also preserves position, FILE_FMODE rewrites the saved *fdata*.
    *open* is the (pickled) open callable; *strictio* makes a missing file
    or invalid position raise instead of degrading gracefully.
    """
    # only pickles the handle, not the file contents... good? or StringIO(data)?
    # (for file contents see: http://effbot.org/librarybook/copy-reg.htm)
    # NOTE: handle special cases first (are there more special cases?)
    names = {'<stdin>':sys.__stdin__, '<stdout>':sys.__stdout__,
             '<stderr>':sys.__stderr__} #XXX: better fileno=(0,1,2) ?
    if name in list(names.keys()):
        f = names[name] #XXX: safer "f=sys.stdin"
    elif name == '<tmpfile>':
        # BUGFIX: os.tmpfile() was removed in Python 3, so this branch (hit
        # when unpickling a py2 tmpfile handle) raised AttributeError; use
        # tempfile with py2 os.tmpfile's 'w+b' mode instead.
        import tempfile
        f = tempfile.TemporaryFile(mode='w+b')
    elif name == '<fdopen>':
        import tempfile
        f = tempfile.TemporaryFile(mode)
    else:
        try:
            exists = os.path.exists(name)
        except Exception:
            exists = False
        if not exists:
            if strictio:
                raise FileNotFoundError("[Errno 2] No such file or directory: '%s'" % name)
            elif "r" in mode and fmode != FILE_FMODE:
                name = '<fdopen>' # or os.devnull?
            current_size = 0 # or maintain position?
        else:
            current_size = os.path.getsize(name)

        if position > current_size:
            if strictio:
                raise ValueError("invalid buffer size")
            elif fmode == CONTENTS_FMODE:
                position = current_size
        # try to open the file by name
        # NOTE: has different fileno
        try:
            #FIXME: missing: *buffering*, encoding, softspace
            if fmode == FILE_FMODE:
                # restore the saved contents, then reopen in the original mode
                f = open(name, mode if "w" in mode else "w")
                f.write(fdata)
                if "w" not in mode:
                    f.close()
                    f = open(name, mode)
            elif name == '<fdopen>': # file did not exist
                import tempfile
                f = tempfile.TemporaryFile(mode)
            # treat x mode as w mode
            elif fmode == CONTENTS_FMODE \
               and ("w" in mode or "x" in mode):
                # stop truncation when opening
                flags = os.O_CREAT
                if "+" in mode:
                    flags |= os.O_RDWR
                else:
                    flags |= os.O_WRONLY
                f = os.fdopen(os.open(name, flags), mode)
                # set name to the correct value
                r = getattr(f, "buffer", f)
                r = getattr(r, "raw", r)
                r.name = name
                assert f.name == name
            else:
                f = open(name, mode)
        except (IOError, FileNotFoundError):
            err = sys.exc_info()[1]
            raise UnpicklingError(err)
    if closed:
        f.close()
    elif position >= 0 and fmode != HANDLE_FMODE:
        f.seek(position)
    return f

898 

899def _create_stringi(value, position, closed): 

900 f = StringIO(value) 

901 if closed: f.close() 

902 else: f.seek(position) 

903 return f 

904 

905def _create_stringo(value, position, closed): 

906 f = StringIO() 

907 if closed: f.close() 

908 else: 

909 f.write(value) 

910 f.seek(position) 

911 return f 

912 

913class _itemgetter_helper(object): 

914 def __init__(self): 

915 self.items = [] 

916 def __getitem__(self, item): 

917 self.items.append(item) 

918 return 

919 

920class _attrgetter_helper(object): 

921 def __init__(self, attrs, index=None): 

922 self.attrs = attrs 

923 self.index = index 

924 def __getattribute__(self, attr): 

925 attrs = object.__getattribute__(self, "attrs") 

926 index = object.__getattribute__(self, "index") 

927 if index is None: 

928 index = len(attrs) 

929 attrs.append(attr) 

930 else: 

931 attrs[index] = ".".join([attrs[index], attr]) 

932 return type(self)(attrs, index) 

933 

934class _dictproxy_helper(dict): 

935 def __ror__(self, a): 

936 return a 

937 

938_dictproxy_helper_instance = _dictproxy_helper() 

939 

# Probe once at import time, with a throwaway dict, whether the proxy trick
# actually returns the wrapped mapping on this interpreter.
__d = {}
try:
    # In CPython 3.9 and later, this trick can be used to exploit the
    # implementation of the __or__ function of MappingProxyType to get the true
    # mapping referenced by the proxy. It may work for other implementations,
    # but is not guaranteed.
    MAPPING_PROXY_TRICK = __d is (DictProxyType(__d) | _dictproxy_helper_instance)
except Exception:
    MAPPING_PROXY_TRICK = False
del __d

950 

# _CELL_REF and _CELL_EMPTY are used to stay compatible with versions of dill
# whose _create_cell functions do not have a default value.
# _CELL_REF can be safely removed entirely (replaced by empty tuples for calls
# to _create_cell) once breaking changes are allowed.
_CELL_REF = None  # placeholder contents for cells filled in by postprocessing
_CELL_EMPTY = Sentinel('_CELL_EMPTY')  # marks a cell that has no contents at all

957 

def _create_cell(contents=None):
    """Recreate a closure cell holding *contents*.

    When *contents* is the _CELL_EMPTY sentinel, ``value`` is deliberately
    left unassigned so the lambda's closure cell is created empty — do not
    "fix" the apparent unbound local.
    """
    if contents is not _CELL_EMPTY:
        value = contents
    return (lambda: value).__closure__[0]

962 

963def _create_weakref(obj, *args): 

964 from weakref import ref 

965 if obj is None: # it's dead 

966 from collections import UserDict 

967 return ref(UserDict(), *args) 

968 return ref(obj, *args) 

969 

970def _create_weakproxy(obj, callable=False, *args): 

971 from weakref import proxy 

972 if obj is None: # it's dead 

973 if callable: return proxy(lambda x:x, *args) 

974 from collections import UserDict 

975 return proxy(UserDict(), *args) 

976 return proxy(obj, *args) 

977 

def _eval_repr(repr_str):
    """Reconstruct an object by evaluating its repr string.

    NOTE: uses eval(); acceptable only because dill already assumes pickle
    input is trusted, and the reprs come from save_singleton.
    """
    return eval(repr_str)

980 

981def _create_array(f, args, state, npdict=None): 

982 #array = numpy.core.multiarray._reconstruct(*args) 

983 array = f(*args) 

984 array.__setstate__(state) 

985 if npdict is not None: # we also have saved state in __dict__ 

986 array.__dict__.update(npdict) 

987 return array 

988 

def _create_dtypemeta(scalar_type):
    """Recreate the numpy dtype metaclass (scalar_type None) or the concrete
    dtype subclass for *scalar_type*."""
    if NumpyDType is True: __hook__() # a bit hacky I think
    return NumpyDType if scalar_type is None else type(NumpyDType(scalar_type))

994 

def _create_namedtuple(name, fieldnames, modulename, defaults=None):
    """Recreate a namedtuple class, reusing the importable original if it
    still exists at modulename.name."""
    existing = _import_module(modulename + '.' + name, safe=True)
    if existing is not None:
        return existing
    import collections
    return collections.namedtuple(name, fieldnames, defaults=defaults, module=modulename)

1002 

def _create_capsule(pointer, name, context, destructor):
    """Recreate a PyCapsule: prefer an existing capsule importable at the
    dotted *name*; otherwise rebuild one around the raw pointer (which may
    no longer be valid — memory errors are possible)."""
    attr_found = False
    try:
        # based on https://github.com/python/cpython/blob/f4095e53ab708d95e019c909d5928502775ba68f/Objects/capsule.c#L209-L231
        uname = name.decode('utf8')
        # try each split point: longest importable module prefix first
        for i in range(1, uname.count('.')+1):
            names = uname.rsplit('.', i)
            try:
                module = __import__(names[0])
            except ImportError:
                # BUGFIX: was 'pass', which fell through to use a stale (or
                # unbound) 'module' — the resulting NameError escaped to the
                # outer except and aborted the remaining split attempts.
                continue
            obj = module
            for attr in names[1:]:
                obj = getattr(obj, attr)
            capsule = obj
            attr_found = True
            break
    except Exception:
        pass

    if attr_found:
        if _PyCapsule_IsValid(capsule, name):
            return capsule
        raise UnpicklingError("%s object exists at %s but a PyCapsule object was expected." % (type(capsule), name))
    else:
        #warnings.warn('Creating a new PyCapsule %s for a C data structure that may not be present in memory. Segmentation faults or other memory errors are possible.' % (name,), UnpicklingWarning)
        capsule = _PyCapsule_New(pointer, name, destructor)
        _PyCapsule_SetContext(capsule, context)
        return capsule

1032 

def _getattr(objclass, name, repr_str):
    """Recover attribute *name* of *objclass* (used to unpickle descriptors),
    grabbing the reference directly from the repr when possible.

    NOTE: evaluates text derived from the pickled repr; safe only under
    dill's standing assumption that pickle input is trusted.
    """
    # hack to grab the reference directly
    try: #XXX: works only for __builtin__ ?
        attr = repr_str.split("'")[3]
        return eval(attr+'.__dict__["'+name+'"]')
    except Exception:
        # fall back to class __dict__ (mappingproxy) then plain getattr
        try:
            attr = objclass.__dict__
            if type(attr) is DictProxyType:
                attr = attr[name]
            else:
                attr = getattr(objclass,name)
        except (AttributeError, KeyError):
            attr = getattr(objclass,name)
    return attr

1048 

def _get_attr(self, name):
    """Look up *name* on *self*, falling back to the builtins module."""
    # stop recursive pickling
    # NOTE(review): a falsy attribute value (0, '', None) also falls through
    # to the builtins lookup -- confirm that is intended before changing.
    return getattr(self, name, None) or getattr(__builtin__, name)

1052 

1053def _import_module(import_name, safe=False): 

1054 try: 

1055 if import_name.startswith('__runtime__.'): 

1056 return sys.modules[import_name] 

1057 elif '.' in import_name: 

1058 items = import_name.split('.') 

1059 module = '.'.join(items[:-1]) 

1060 obj = items[-1] 

1061 submodule = getattr(__import__(module, None, None, [obj]), obj) 

1062 if isinstance(submodule, (ModuleType, type)): 

1063 return submodule 

1064 return __import__(import_name, None, None, [obj]) 

1065 else: 

1066 return __import__(import_name) 

1067 except (ImportError, AttributeError, KeyError): 

1068 if safe: 

1069 return None 

1070 raise 

1071 

1072# https://github.com/python/cpython/blob/a8912a0f8d9eba6d502c37d522221f9933e976db/Lib/pickle.py#L322-L333 

1073def _getattribute(obj, name): 

1074 for subpath in name.split('.'): 

1075 if subpath == '<locals>': 

1076 raise AttributeError("Can't get local attribute {!r} on {!r}" 

1077 .format(name, obj)) 

1078 try: 

1079 parent = obj 

1080 obj = getattr(obj, subpath) 

1081 except AttributeError: 

1082 raise AttributeError("Can't get attribute {!r} on {!r}" 

1083 .format(name, obj)) 

1084 return obj, parent 

1085 

def _locate_function(obj, pickler=None):
    """Return True if *obj* can be found at its advertised module path
    (and so may be pickled by reference rather than by value)."""
    module_name = getattr(obj, '__module__', None)
    # never by-reference for __main__ objects, or for the module currently
    # being dumped as a session
    # NOTE: the 'or'/'and' precedence below is load-bearing; keep as-is
    if module_name in ['__main__', None] or \
            pickler and is_dill(pickler, child=False) and pickler._session and module_name == pickler._main.__name__:
        return False
    if hasattr(obj, '__qualname__'):
        module = _import_module(module_name, safe=True)
        try:
            found, _ = _getattribute(module, obj.__qualname__)
            return found is obj
        except AttributeError:
            return False
    else:
        found = _import_module(module_name + '.' + obj.__name__, safe=True)
        return found is obj

1101 

1102 

1103def _setitems(dest, source): 

1104 for k, v in source.items(): 

1105 dest[k] = v 

1106 

1107 

def _save_with_postproc(pickler, reduction, is_pickler_dill=None, obj=Getattr.NO_DEFAULT, postproc_list=None):
    """Save *reduction* for *obj*, deferring registered postprocessing steps
    (e.g. late dict updates that break reference cycles) until after the
    main reduce has been written to the pickle stream.

    Objects already on the postproc stack (true self-recursion) fall back
    to save_global with a PicklingWarning.
    """
    if obj is Getattr.NO_DEFAULT:
        obj = Reduce(reduction) # pragma: no cover

    if is_pickler_dill is None:
        is_pickler_dill = is_dill(pickler, child=True)
    if is_pickler_dill:
        # assert id(obj) not in pickler._postproc, str(obj) + ' already pushed on stack!'
        # if not hasattr(pickler, 'x'): pickler.x = 0
        # print(pickler.x*' ', 'push', obj, id(obj), pickler._recurse)
        # pickler.x += 1
        if postproc_list is None:
            postproc_list = []

        # Recursive object not supported. Default to a global instead.
        if id(obj) in pickler._postproc:
            name = '%s.%s ' % (obj.__module__, getattr(obj, '__qualname__', obj.__name__)) if hasattr(obj, '__module__') else ''
            warnings.warn('Cannot pickle %r: %shas recursive self-references that trigger a RecursionError.' % (obj, name), PicklingWarning)
            pickler.save_global(obj)
            return
        pickler._postproc[id(obj)] = postproc_list

    # TODO: Use state_setter in Python 3.8 to allow for faster cPickle implementations
    pickler.save_reduce(*reduction, obj=obj)

    if is_pickler_dill:
        # pickler.x -= 1
        # print(pickler.x*' ', 'pop', obj, id(obj))
        postproc = pickler._postproc.pop(id(obj))
        # assert postproc_list == postproc, 'Stack tampered!'
        # replay deferred steps in reverse registration order
        for reduction in reversed(postproc):
            if reduction[0] is _setitems:
                # use the internal machinery of pickle.py to speedup when
                # updating a dictionary in postproc
                dest, source = reduction[1]
                if source:
                    pickler.write(pickler.get(pickler.memo[id(dest)][0]))
                    if sys.hexversion < 0x30e00a1:
                        pickler._batch_setitems(iter(source.items()))
                    else:
                        # Python 3.14.0a1+ added the 'obj' argument
                        pickler._batch_setitems(iter(source.items()), obj=obj)
                else:
                    # Updating with an empty dictionary. Same as doing nothing.
                    continue
            else:
                pickler.save_reduce(*reduction)
        # pop None created by calling preprocessing step off stack
        pickler.write(POP)

1156 

1157#@register(CodeType) 

1158#def save_code(pickler, obj): 

1159# logger.trace(pickler, "Co: %s", obj) 

1160# pickler.save_reduce(_unmarshal, (marshal.dumps(obj),), obj=obj) 

1161# logger.trace(pickler, "# Co") 

1162# return 

1163 

# The following function is based on 'save_codeobject' from 'cloudpickle'
# Copyright (c) 2012, Regents of the University of California.
# Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
# License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE
@register(CodeType)
def save_code(pickler, obj):
    """Pickle a code object by capturing the version-specific tuple of co_*
    fields consumed by _create_code on load.

    Each branch matches one CodeType constructor layout, detected by probing
    for the newest distinguishing co_* attribute. Field ORDER is significant.
    """
    logger.trace(pickler, "Co: %s", obj)
    if hasattr(obj, "co_endlinetable"): # python 3.11a (20 args)
        args = (
            obj.co_lnotab, # for < python 3.10 [not counted in args]
            obj.co_argcount, obj.co_posonlyargcount,
            obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
            obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
            obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname,
            obj.co_firstlineno, obj.co_linetable, obj.co_endlinetable,
            obj.co_columntable, obj.co_exceptiontable, obj.co_freevars,
            obj.co_cellvars
        )
    elif hasattr(obj, "co_exceptiontable"): # python 3.11 (18 args)
        # accessing co_lnotab emits a DeprecationWarning on newer 3.12+
        with warnings.catch_warnings():
            if not OLD312a7: # issue 597
                warnings.filterwarnings('ignore', category=DeprecationWarning)
            args = (
                obj.co_lnotab, # for < python 3.10 [not counted in args]
                obj.co_argcount, obj.co_posonlyargcount,
                obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
                obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
                obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname,
                obj.co_firstlineno, obj.co_linetable, obj.co_exceptiontable,
                obj.co_freevars, obj.co_cellvars
            )
    elif hasattr(obj, "co_qualname"): # pypy 3.11 7.3.19+ (17 args)
        args = (
            obj.co_lnotab, obj.co_argcount, obj.co_posonlyargcount,
            obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
            obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
            obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname,
            obj.co_firstlineno, obj.co_linetable, obj.co_freevars,
            obj.co_cellvars
        )
    elif hasattr(obj, "co_linetable"): # python 3.10 (16 args)
        args = (
            obj.co_lnotab, # for < python 3.10 [not counted in args]
            obj.co_argcount, obj.co_posonlyargcount,
            obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
            obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
            obj.co_varnames, obj.co_filename, obj.co_name,
            obj.co_firstlineno, obj.co_linetable, obj.co_freevars,
            obj.co_cellvars
        )
    elif hasattr(obj, "co_posonlyargcount"): # python 3.8 (16 args)
        args = (
            obj.co_argcount, obj.co_posonlyargcount,
            obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
            obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
            obj.co_varnames, obj.co_filename, obj.co_name,
            obj.co_firstlineno, obj.co_lnotab, obj.co_freevars,
            obj.co_cellvars
        )
    else: # python 3.7 (15 args)
        args = (
            obj.co_argcount, obj.co_kwonlyargcount, obj.co_nlocals,
            obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts,
            obj.co_names, obj.co_varnames, obj.co_filename,
            obj.co_name, obj.co_firstlineno, obj.co_lnotab,
            obj.co_freevars, obj.co_cellvars
        )

    pickler.save_reduce(_create_code, args, obj=obj)
    logger.trace(pickler, "# Co")
    return

1235 

1236def _repr_dict(obj): 

1237 """Make a short string representation of a dictionary.""" 

1238 return "<%s object at %#012x>" % (type(obj).__name__, id(obj)) 

1239 

@register(dict)
def save_module_dict(pickler, obj):
    """Pickle a dict, special-casing module __dict__s so they unpickle as
    references to the (re)imported module's namespace rather than by value.

    The D1/D3/D4 branches write raw pickle GLOBAL opcodes directly.
    """
    if is_dill(pickler, child=False) and obj == pickler._main.__dict__ and \
            not (pickler._session and pickler._first_pass):
        logger.trace(pickler, "D1: %s", _repr_dict(obj)) # obj
        pickler.write(bytes('c__builtin__\n__main__\n', 'UTF-8'))
        logger.trace(pickler, "# D1")
    elif (not is_dill(pickler, child=False)) and (obj == _main_module.__dict__):
        logger.trace(pickler, "D3: %s", _repr_dict(obj)) # obj
        pickler.write(bytes('c__main__\n__dict__\n', 'UTF-8')) #XXX: works in general?
        logger.trace(pickler, "# D3")
    elif '__name__' in obj and obj != _main_module.__dict__ \
            and type(obj['__name__']) is str \
            and obj is getattr(_import_module(obj['__name__'],True), '__dict__', None):
        logger.trace(pickler, "D4: %s", _repr_dict(obj)) # obj
        pickler.write(bytes('c%s\n__dict__\n' % obj['__name__'], 'UTF-8'))
        logger.trace(pickler, "# D4")
    else:
        logger.trace(pickler, "D2: %s", _repr_dict(obj)) # obj
        if is_dill(pickler, child=False) and pickler._session:
            # we only care about session the first pass thru
            pickler._first_pass = False
        StockPickler.save_dict(pickler, obj)
        logger.trace(pickler, "# D2")
    return

1265 

1266 

if not OLD310 and MAPPING_PROXY_TRICK:
    # Python >= 3.10 with a working proxy trick: views expose .mapping (a
    # MappingProxyType); recover the true dict behind it, then reduce the
    # view to "call keys/values/items on that dict".
    def save_dict_view(dicttype):
        def save_dict_view_for_function(func):
            def _save_dict_view(pickler, obj):
                logger.trace(pickler, "Dkvi: <%s>", obj)
                mapping = obj.mapping | _dictproxy_helper_instance
                pickler.save_reduce(func, (mapping,), obj=obj)
                logger.trace(pickler, "# Dkvi")
            return _save_dict_view
        return [
            (funcname, save_dict_view_for_function(getattr(dicttype, funcname)))
            for funcname in ('keys', 'values', 'items')
        ]
else:
    # The following functions are based on 'cloudpickle'
    # https://github.com/cloudpipe/cloudpickle/blob/5d89947288a18029672596a4d719093cc6d5a412/cloudpickle/cloudpickle.py#L922-L940
    # Copyright (c) 2012, Regents of the University of California.
    # Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
    # License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE
    # Fallback: rebuild a synthetic dict from the view's contents and reduce
    # the view to keys/values/items of that reconstruction.
    def save_dict_view(dicttype):
        def save_dict_keys(pickler, obj):
            logger.trace(pickler, "Dk: <%s>", obj)
            dict_constructor = _shims.Reduce(dicttype.fromkeys, (list(obj),))
            pickler.save_reduce(dicttype.keys, (dict_constructor,), obj=obj)
            logger.trace(pickler, "# Dk")

        def save_dict_values(pickler, obj):
            logger.trace(pickler, "Dv: <%s>", obj)
            # keyed by enumeration index, since values alone have no keys
            dict_constructor = _shims.Reduce(dicttype, (enumerate(obj),))
            pickler.save_reduce(dicttype.values, (dict_constructor,), obj=obj)
            logger.trace(pickler, "# Dv")

        def save_dict_items(pickler, obj):
            logger.trace(pickler, "Di: <%s>", obj)
            pickler.save_reduce(dicttype.items, (dicttype(obj),), obj=obj)
            logger.trace(pickler, "# Di")

        return (
            ('keys', save_dict_keys),
            ('values', save_dict_values),
            ('items', save_dict_items)
        )

1309 

# Register the view-saving functions for dict and OrderedDict view types.
for __dicttype in (
    dict,
    OrderedDict
):
    __obj = __dicttype()
    for __funcname, __savefunc in save_dict_view(__dicttype):
        # the concrete view type (e.g. dict_keys) is only reachable through
        # an instance, hence the throwaway __obj
        __tview = type(getattr(__obj, __funcname)())
        if __tview not in Pickler.dispatch: # don't clobber an existing handler
            Pickler.dispatch[__tview] = __savefunc
del __dicttype, __obj, __funcname, __tview, __savefunc

1320 

1321 

@register(ClassType)
def save_classobj(pickler, obj): #FIXME: enable pickler._byref
    """Pickle a class: by value (name, bases, dict) when it cannot be located
    at its module path, by global reference otherwise."""
    if not _locate_function(obj, pickler):
        logger.trace(pickler, "C1: %s", obj)
        pickler.save_reduce(ClassType, (obj.__name__, obj.__bases__,
                                        obj.__dict__), obj=obj)
        #XXX: or obj.__dict__.copy()), obj=obj) ?
        logger.trace(pickler, "# C1")
    else:
        logger.trace(pickler, "C2: %s", obj)
        name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
        StockPickler.save_global(pickler, obj, name=name)
        logger.trace(pickler, "# C2")
    return

1336 

@register(typing._GenericAlias)
def save_generic_alias(pickler, obj):
    """Pickle a typing generic alias: plain names go by global reference,
    empty/() tuple aliases use the _create_typing_tuple workaround, and
    everything else goes through its own __reduce__."""
    args = obj.__args__
    if type(obj.__reduce__()) is str:
        logger.trace(pickler, "Ga0: %s", obj)
        StockPickler.save_global(pickler, obj, name=obj.__reduce__())
        logger.trace(pickler, "# Ga0")
    elif obj.__origin__ is tuple and (not args or args == ((),)):
        logger.trace(pickler, "Ga1: %s", obj)
        pickler.save_reduce(_create_typing_tuple, (args,), obj=obj)
        logger.trace(pickler, "# Ga1")
    else:
        logger.trace(pickler, "Ga2: %s", obj)
        StockPickler.save_reduce(pickler, *obj.__reduce__(), obj=obj)
        logger.trace(pickler, "# Ga2")
    return

1353 

if ThreadHandleType:
    @register(ThreadHandleType)
    def save_thread_handle(pickler, obj):
        """Pickle a thread handle by its ident and completion state."""
        logger.trace(pickler, "Th: %s", obj)
        pickler.save_reduce(_create_thread_handle, (obj.ident, obj.is_done()), obj=obj)
        logger.trace(pickler, "# Th")
        return

1361 

@register(LockType) #XXX: copied Thread will have new Event (due to new Lock)
def save_lock(pickler, obj):
    """Pickle a threading.Lock by recording only whether it is currently held."""
    logger.trace(pickler, "Lo: %s", obj)
    is_held = obj.locked()
    pickler.save_reduce(_create_lock, (is_held,), obj=obj)
    logger.trace(pickler, "# Lo")

1368 

@register(RLockType)
def save_rlock(pickler, obj):
    """Pickle an RLock by parsing its recursion count and owner out of repr()."""
    logger.trace(pickler, "RL: %s", obj)
    r = obj.__repr__() # don't use _release_save as it unlocks the lock
    # NOTE(review): assumes the "<... owner=N count=M ...>" repr format used
    # by CPython's _thread.RLock -- verify on other implementations
    count = int(r.split('count=')[1].split()[0].rstrip('>'))
    owner = int(r.split('owner=')[1].split()[0])
    pickler.save_reduce(_create_rlock, (count,owner,), obj=obj)
    logger.trace(pickler, "# RL")
    return

1378 

#@register(SocketType) #FIXME: causes multiprocess test_pickling FAIL
def save_socket(pickler, obj):
    """Pickle a socket via the multiprocessing reduce_socket reduction.

    Currently NOT registered (see the commented decorator above).
    """
    logger.trace(pickler, "So: %s", obj)
    pickler.save_reduce(*reduce_socket(obj))
    logger.trace(pickler, "# So")
    return

1385 

def _save_file(pickler, obj, open_):
    """Common reducer for file-like handles: records name/mode/position and,
    under FILE_FMODE, the full file contents.

    *open_* is the open callable to be pickled alongside (builtin open or
    the pure-python _open).
    """
    if obj.closed:
        position = 0
    else:
        obj.flush()
        if obj in (sys.__stdout__, sys.__stderr__, sys.__stdin__):
            position = -1 # standard streams: position is meaningless
        else:
            position = obj.tell()
    if is_dill(pickler, child=True) and pickler._fmode == FILE_FMODE:
        # FILE_FMODE pickles the actual file contents too
        f = open_(obj.name, "r")
        fdata = f.read()
        f.close()
    else:
        fdata = ""
    if is_dill(pickler, child=True):
        strictio = pickler._strictio
        fmode = pickler._fmode
    else:
        strictio = False
        fmode = 0 # HANDLE_FMODE
    pickler.save_reduce(_create_filehandle, (obj.name, obj.mode, position,
                                             obj.closed, open_, strictio,
                                             fmode, fdata), obj=obj)
    return

1411 

1412 

@register(FileType) #XXX: in 3.x has buffer=0, needs different _create?
@register(BufferedReaderType)
@register(BufferedWriterType)
@register(TextWrapperType)
def save_file(pickler, obj):
    """Pickle C-implemented io handles via the shared _save_file reducer."""
    logger.trace(pickler, "Fi: %s", obj)
    f = _save_file(pickler, obj, open)
    logger.trace(pickler, "# Fi")
    return f

1422 

if BufferedRandomType:
    @register(BufferedRandomType)
    def save_file(pickler, obj):
        """Pickle buffered random-access handles (same reducer, builtin open)."""
        logger.trace(pickler, "Fi: %s", obj)
        f = _save_file(pickler, obj, open)
        logger.trace(pickler, "# Fi")
        return f

1430 

if PyTextWrapperType:
    @register(PyBufferedReaderType)
    @register(PyBufferedWriterType)
    @register(PyTextWrapperType)
    def save_file(pickler, obj):
        """Pickle pure-python io handles, reopening them with _open on load."""
        logger.trace(pickler, "Fi: %s", obj)
        f = _save_file(pickler, obj, _open)
        logger.trace(pickler, "# Fi")
        return f

    if PyBufferedRandomType:
        @register(PyBufferedRandomType)
        def save_file(pickler, obj):
            """Pickle pure-python buffered random-access handles via _open."""
            logger.trace(pickler, "Fi: %s", obj)
            f = _save_file(pickler, obj, _open)
            logger.trace(pickler, "# Fi")
            return f

1448 

1449 

# The following two functions are based on 'saveCStringIoInput'
# and 'saveCStringIoOutput' from spickle
# Copyright (c) 2011 by science+computing ag
# License: http://www.apache.org/licenses/LICENSE-2.0
if InputType:
    @register(InputType)
    def save_stringi(pickler, obj):
        """Pickle a StringI (input string buffer) with its value and position."""
        logger.trace(pickler, "Io: %s", obj)
        if obj.closed:
            value = ''; position = 0
        else:
            value = obj.getvalue(); position = obj.tell()
        pickler.save_reduce(_create_stringi, (value, position, \
                                              obj.closed), obj=obj)
        logger.trace(pickler, "# Io")
        return

    @register(OutputType)
    def save_stringo(pickler, obj):
        """Pickle a StringO (output string buffer) with its value and position."""
        logger.trace(pickler, "Io: %s", obj)
        if obj.closed:
            value = ''; position = 0
        else:
            value = obj.getvalue(); position = obj.tell()
        pickler.save_reduce(_create_stringo, (value, position, \
                                              obj.closed), obj=obj)
        logger.trace(pickler, "# Io")
        return

1478 

if LRUCacheType is not None:
    from functools import lru_cache
    @register(LRUCacheType)
    def save_lru_cache(pickler, obj):
        """Pickle an lru_cache wrapper by re-wrapping its __wrapped__ function
        on load; the cache CONTENTS are not preserved."""
        logger.trace(pickler, "LRU: %s", obj)
        if OLD39:
            # cache_parameters() was added in 3.9; fall back to cache_info()
            kwargs = obj.cache_info()
            args = (kwargs.maxsize,)
        else:
            kwargs = obj.cache_parameters()
            args = (kwargs['maxsize'], kwargs['typed'])
        if args != lru_cache.__defaults__:
            wrapper = Reduce(lru_cache, args, is_callable=True)
        else:
            # default parameters: pickle the decorator itself
            wrapper = lru_cache
        pickler.save_reduce(wrapper, (obj.__wrapped__,), obj=obj)
        logger.trace(pickler, "# LRU")
        return

1497 

@register(SuperType)
def save_super(pickler, obj):
    """Pickle a super() object from its (__thisclass__, __self__) pair."""
    logger.trace(pickler, "Su: %s", obj)
    bound_pair = (obj.__thisclass__, obj.__self__)
    pickler.save_reduce(super, bound_pair, obj=obj)
    logger.trace(pickler, "# Su")

1504 

if IS_PYPY:
    @register(MethodType)
    def save_instancemethod0(pickler, obj):
        """Pickle a bound method; PyPy builtins whose __code__ is not a real
        CodeType are reduced via getattr on their __self__ instead."""
        code = getattr(obj.__func__, '__code__', None)
        if code is not None and type(code) is not CodeType \
                and getattr(obj.__self__, obj.__name__) == obj:
            # Some PyPy builtin functions have no module name
            logger.trace(pickler, "Me2: %s", obj)
            # TODO: verify that this works for all PyPy builtin methods
            pickler.save_reduce(getattr, (obj.__self__, obj.__name__), obj=obj)
            logger.trace(pickler, "# Me2")
            return

        logger.trace(pickler, "Me1: %s", obj)
        pickler.save_reduce(MethodType, (obj.__func__, obj.__self__), obj=obj)
        logger.trace(pickler, "# Me1")
        return
else:
    @register(MethodType)
    def save_instancemethod0(pickler, obj):
        """Pickle a bound method from its (__func__, __self__) pair."""
        logger.trace(pickler, "Me1: %s", obj)
        pickler.save_reduce(MethodType, (obj.__func__, obj.__self__), obj=obj)
        logger.trace(pickler, "# Me1")
        return

1529 

if not IS_PYPY:
    @register(MemberDescriptorType)
    @register(GetSetDescriptorType)
    @register(MethodDescriptorType)
    @register(WrapperDescriptorType)
    @register(ClassMethodDescriptorType)
    def save_wrapper_descriptor(pickler, obj):
        """Pickle C-level descriptors by re-fetching them from __objclass__
        via _getattr (using name and repr as lookup hints)."""
        logger.trace(pickler, "Wr: %s", obj)
        pickler.save_reduce(_getattr, (obj.__objclass__, obj.__name__,
                                       obj.__repr__()), obj=obj)
        logger.trace(pickler, "# Wr")
        return
else:
    # PyPy exposes fewer descriptor types; handle the two it has the same way
    @register(MemberDescriptorType)
    @register(GetSetDescriptorType)
    def save_wrapper_descriptor(pickler, obj):
        """Pickle the descriptor types PyPy provides, via _getattr."""
        logger.trace(pickler, "Wr: %s", obj)
        pickler.save_reduce(_getattr, (obj.__objclass__, obj.__name__,
                                       obj.__repr__()), obj=obj)
        logger.trace(pickler, "# Wr")
        return

1551 

@register(CellType)
def save_cell(pickler, obj):
    """Pickle a closure cell, handling empty cells (Ce3) and cells whose
    contents are currently mid-pickle — i.e. reference cycles — via the
    postprocessing machinery (Ce2)."""
    try:
        f = obj.cell_contents
    except ValueError: # cell is empty
        logger.trace(pickler, "Ce3: %s", obj)
        # _shims._CELL_EMPTY is defined in _shims.py to support PyPy 2.7.
        # It unpickles to a sentinel object _dill._CELL_EMPTY, also created in
        # _shims.py. This object is not present in Python 3 because the cell's
        # contents can be deleted in newer versions of Python. The reduce object
        # will instead unpickle to None if unpickled in Python 3.

        # When breaking changes are made to dill, (_shims._CELL_EMPTY,) can
        # be replaced by () OR the delattr function can be removed depending on
        # whichever is more convenient.
        pickler.save_reduce(_create_cell, (_shims._CELL_EMPTY,), obj=obj)
        # Call the function _delattr on the cell's cell_contents attribute
        # The result of this function call will be None
        pickler.save_reduce(_shims._delattr, (obj, 'cell_contents'))
        # pop None created by calling _delattr off stack
        pickler.write(POP)
        logger.trace(pickler, "# Ce3")
        return
    if is_dill(pickler, child=True):
        if id(f) in pickler._postproc:
            # Already seen. Add to its postprocessing.
            postproc = pickler._postproc[id(f)]
        else:
            # Haven't seen it. Add to the highest possible object and set its
            # value as late as possible to prevent cycle.
            postproc = next(iter(pickler._postproc.values()), None)
        if postproc is not None:
            logger.trace(pickler, "Ce2: %s", obj)
            # _CELL_REF is defined in _shims.py to support older versions of
            # dill. When breaking changes are made to dill, (_CELL_REF,) can
            # be replaced by ()
            pickler.save_reduce(_create_cell, (_CELL_REF,), obj=obj)
            postproc.append((_shims._setattr, (obj, 'cell_contents', f)))
            logger.trace(pickler, "# Ce2")
            return
    logger.trace(pickler, "Ce1: %s", obj)
    pickler.save_reduce(_create_cell, (f,), obj=obj)
    logger.trace(pickler, "# Ce1")
    return

1596 

if MAPPING_PROXY_TRICK:
    @register(DictProxyType)
    def save_dictproxy(pickler, obj):
        """Pickle a MappingProxyType, wrapping the TRUE underlying mapping
        (recovered via the __ror__ trick) so identity is preserved."""
        logger.trace(pickler, "Mp: %s", _repr_dict(obj)) # obj
        mapping = obj | _dictproxy_helper_instance
        pickler.save_reduce(DictProxyType, (mapping,), obj=obj)
        logger.trace(pickler, "# Mp")
        return
else:
    @register(DictProxyType)
    def save_dictproxy(pickler, obj):
        """Pickle a MappingProxyType, wrapping a shallow copy of its mapping."""
        logger.trace(pickler, "Mp: %s", _repr_dict(obj)) # obj
        pickler.save_reduce(DictProxyType, (obj.copy(),), obj=obj)
        logger.trace(pickler, "# Mp")
        return

1612 

@register(SliceType)
def save_slice(pickler, obj):
    """Pickle a slice object from its (start, stop, step) triple."""
    logger.trace(pickler, "Sl: %s", obj)
    bounds = (obj.start, obj.stop, obj.step)
    pickler.save_reduce(slice, bounds, obj=obj)
    logger.trace(pickler, "# Sl")

1619 

@register(XRangeType)
@register(EllipsisType)
@register(NotImplementedType)
def save_singleton(pickler, obj):
    """Pickle objects whose repr() round-trips through eval
    (range, Ellipsis, NotImplemented)."""
    logger.trace(pickler, "Si: %s", obj)
    pickler.save_reduce(_eval_repr, (obj.__repr__(),), obj=obj)
    logger.trace(pickler, "# Si")
    return

1628 

def _proxy_helper(obj): # a dead proxy returns a reference to None
    """get memory address of proxy's reference object"""
    _repr = repr(obj)
    try: _str = str(obj)
    except ReferenceError: # it's a dead proxy
        return id(None)
    if _str == _repr: return id(obj) # it's a repr
    try: # either way, it's a proxy from here
        # parse the address out of the referent's "<... at 0x...>" str
        address = int(_str.rstrip('>').split(' at ')[-1], base=16)
    except ValueError: # special case: proxy of a 'type'
        if not IS_PYPY:
            address = int(_repr.rstrip('>').split(' at ')[-1], base=16)
        else:
            # PyPy reprs carry no address; scan live objects for a str match
            objects = iter(gc.get_objects())
            for _obj in objects:
                if repr(_obj) == _str: return id(_obj)
            # all bad below... nothing found so throw ReferenceError
            msg = "Cannot reference object for proxy at '%s'" % id(obj)
            raise ReferenceError(msg)
    return address

1649 

def _locate_object(address, module=None):
    """get object located at the given memory address (inverse of id(obj))"""
    # fast path for the singletons that are always identifiable
    for singleton in (None, True, False): #XXX: more...?
        if address == id(singleton):
            return singleton
    if module:
        candidates = iter(module.__dict__.values())
    else:
        candidates = iter(gc.get_objects())
    for candidate in candidates:
        if address == id(candidate):
            return candidate
    # nothing found: report a bad address (TypeError) or a missing object
    try:
        address = hex(address)
    except TypeError:
        raise TypeError("'%s' is not a valid memory address" % str(address))
    raise ReferenceError("Cannot reference object at '%s'" % address)

1665 

@register(ReferenceType)
def save_weakref(pickler, obj):
    """Pickle a weak reference via the object it points to (None if dead)."""
    target = obj()
    logger.trace(pickler, "R1: %s", obj)
    #refobj = ctypes.pythonapi.PyWeakref_GetObject(obj) # dead returns "None"
    pickler.save_reduce(_create_weakref, (target,), obj=obj)
    logger.trace(pickler, "# R1")
    return

1674 

@register(ProxyType)
@register(CallableProxyType)
def save_weakproxy(pickler, obj):
    """Pickle a weak proxy by locating its referent and recording callability."""
    # Must do string substitution here and use %r to avoid ReferenceError.
    logger.trace(pickler, "R2: %r" % obj)
    referent = _locate_object(_proxy_helper(obj))
    is_callable = callable(obj)
    pickler.save_reduce(_create_weakproxy, (referent, is_callable), obj=obj)
    logger.trace(pickler, "# R2")
    return

1684 

def _is_builtin_module(module):
    """Heuristically decide whether *module* should be pickled by reference."""
    if not hasattr(module, "__file__"):
        return True
    if module.__file__ is None:
        return False
    # If a module file name starts with one of the interpreter prefixes, it
    # should be a builtin module, so should always be pickled as a reference.
    prefix_names = ["base_prefix", "base_exec_prefix", "exec_prefix", "prefix", "real_prefix"]
    rp = os.path.realpath
    # See https://github.com/uqfoundation/dill/issues/566
    under_prefix = any(
        module.__file__.startswith(getattr(sys, name))
        or rp(module.__file__).startswith(rp(getattr(sys, name)))
        for name in prefix_names
        if hasattr(sys, name)
    )
    return (
        under_prefix
        or module.__file__.endswith(EXTENSION_SUFFIXES)
        or 'site-packages' in module.__file__
    )

1703 

def _is_imported_module(module):
    """Return True if *module* looks like it came through the import system."""
    if getattr(module, '__loader__', None) is not None:
        return True
    return module in sys.modules.values()

1706 

@register(ModuleType)
def save_module(pickler, obj):
    """Pickle a module, either by reference (M2) or with its contents as
    state (M1, used for non-builtin modules and the session __main__)."""
    if False: #_use_diff:
        # NOTE(review): dead branch -- the diff-based module pickling is
        # disabled; `diff` and `_use_diff` are deleted from the namespace
        # at the bottom of this file.
        if obj.__name__.split('.', 1)[0] != "dill":
            try:
                changed = diff.whats_changed(obj, seen=pickler._diff_cache)[0]
            except RuntimeError: # not memorised module, probably part of dill
                pass
            else:
                logger.trace(pickler, "M2: %s with diff", obj)
                logger.info("Diff: %s", changed.keys())
                pickler.save_reduce(_import_module, (obj.__name__,), obj=obj,
                                    state=changed)
                logger.trace(pickler, "# M2")
                return

        logger.trace(pickler, "M1: %s", obj)
        pickler.save_reduce(_import_module, (obj.__name__,), obj=obj)
        logger.trace(pickler, "# M1")
    else:
        builtin_mod = _is_builtin_module(obj)
        # the module currently being saved as a session by a dill Pickler
        is_session_main = is_dill(pickler, child=True) and obj is pickler._main
        if (obj.__name__ not in ("builtins", "dill", "dill._dill") and not builtin_mod
                or is_session_main):
            # M1: pickle the module with its dict as state
            logger.trace(pickler, "M1: %s", obj)
            # Hack for handling module-type objects in load_module().
            mod_name = obj.__name__ if _is_imported_module(obj) else '__runtime__.%s' % obj.__name__
            # Second references are saved as __builtin__.__main__ in save_module_dict().
            main_dict = obj.__dict__.copy()
            for item in ('__builtins__', '__loader__'):
                main_dict.pop(item, None)
            for item in IPYTHON_SINGLETONS: #pragma: no cover
                if getattr(main_dict.get(item), '__module__', '').startswith('IPython'):
                    del main_dict[item]
            pickler.save_reduce(_import_module, (mod_name,), obj=obj, state=main_dict)
            logger.trace(pickler, "# M1")
        elif obj.__name__ == "dill._dill":
            # this module itself: always save by name
            logger.trace(pickler, "M2: %s", obj)
            pickler.save_global(obj, name="_dill")
            logger.trace(pickler, "# M2")
        else:
            # M2: builtin or dill module, pickled as an import by name
            logger.trace(pickler, "M2: %s", obj)
            pickler.save_reduce(_import_module, (obj.__name__,), obj=obj)
            logger.trace(pickler, "# M2")
    return

1752 

1753# The following function is based on '_extract_class_dict' from 'cloudpickle' 

1754# Copyright (c) 2012, Regents of the University of California. 

1755# Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_. 

1756# License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE 

def _get_typedict_type(cls, clsdict, attrs, postproc_list):
    """Retrieve a copy of the dict of a class without the inherited methods"""
    bases = cls.__bases__
    if len(bases) == 1:
        inherited = bases[0].__dict__
    else:
        # merge base dicts in reverse MRO-ish order so earlier bases win
        inherited = {}
        for base in reversed(bases):
            inherited.update(base.__dict__)
    # collect names whose value is literally the inherited attribute object
    doomed = [
        name
        for name, value in dict.items(clsdict)
        if name in inherited
        and value is inherited[name]
        and hasattr(value, '__qualname__')
    ]
    for name in doomed:
        dict.pop(clsdict, name)

    if issubclass(type(cls), type):
        # these descriptors are recreated automatically for new classes
        clsdict.pop('__dict__', None)
        clsdict.pop('__weakref__', None)
        # clsdict.pop('__prepare__', None)
    return clsdict, attrs

1781 

def _get_typedict_abc(obj, _dict, attrs, postproc_list):
    """Strip ABC bookkeeping entries from a class dict and queue the
    re-registration of the ABC's virtual subclasses in *postproc_list*.

    Raises PicklingError when no registry can be found on *obj*.
    """
    if hasattr(abc, '_get_dump'):
        # CPython: the registry is a set of weakrefs to registered classes
        (registry, _, _, _) = abc._get_dump(obj)
        register = obj.register
        postproc_list.extend((register, (reg(),)) for reg in registry)
    elif hasattr(obj, '_abc_registry'):
        # older/pure-python ABCs expose the registry directly
        registry = obj._abc_registry
        register = obj.register
        postproc_list.extend((register, (reg,)) for reg in registry)
    else:
        # Fix: actually interpolate the class into the message; the original
        # passed the format string and the class as two separate exception
        # arguments, so "%s" was never substituted.
        raise PicklingError("Cannot find registry of ABC %s" % (obj,))

    if '_abc_registry' in _dict:
        # pure-python ABC caches are unpicklable weak sets -- drop them
        _dict.pop('_abc_registry', None)
        _dict.pop('_abc_cache', None)
        _dict.pop('_abc_negative_cache', None)
        # _dict.pop('_abc_negative_cache_version', None)
    else:
        # C implementation keeps everything behind an opaque _abc_impl
        _dict.pop('_abc_impl', None)
    return _dict, attrs

1802 

@register(TypeType)
def save_type(pickler, obj, postproc_list=None):
    """Pickle a class/type object, dispatching on what kind of type it is.

    Branches (trace tags):
      T1 -- types known to dill's _typemap, recreated by name via _load_type
      T6 -- namedtuple subclasses, recreated via _create_namedtuple
      T7 -- special types (NoneType, NotImplementedType, EllipsisType, ...)
      T2 -- classes that cannot be located by name, pickled by value
      T4 -- everything else, pickled by reference via save_global
    """
    if obj in _typemap:
        logger.trace(pickler, "T1: %s", obj)
        # if obj in _incedental_types:
        #     warnings.warn('Type %r may only exist on this implementation of Python and cannot be unpickled in other implementations.' % (obj,), PicklingWarning)
        pickler.save_reduce(_load_type, (_typemap[obj],), obj=obj)
        logger.trace(pickler, "# T1")
    elif obj.__bases__ == (tuple,) and all([hasattr(obj, attr) for attr in ('_fields','_asdict','_make','_replace')]):
        # special case: namedtuples
        logger.trace(pickler, "T6: %s", obj)

        obj_name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
        if obj.__name__ != obj_name:
            # restore a __qualname__ that differs from __name__ after creation
            if postproc_list is None:
                postproc_list = []
            postproc_list.append((setattr, (obj, '__qualname__', obj_name)))

        if not obj._field_defaults:
            _save_with_postproc(pickler, (_create_namedtuple, (obj.__name__, obj._fields, obj.__module__)), obj=obj, postproc_list=postproc_list)
        else:
            defaults = [obj._field_defaults[field] for field in obj._fields if field in obj._field_defaults]
            _save_with_postproc(pickler, (_create_namedtuple, (obj.__name__, obj._fields, obj.__module__, defaults)), obj=obj, postproc_list=postproc_list)
        logger.trace(pickler, "# T6")
        return

    # special cases: NoneType, NotImplementedType, EllipsisType, EnumMeta, etc
    elif obj is type(None):
        logger.trace(pickler, "T7: %s", obj)
        #XXX: pickler.save_reduce(type, (None,), obj=obj)
        pickler.write(GLOBAL + b'__builtin__\nNoneType\n')
        logger.trace(pickler, "# T7")
    elif obj is NotImplementedType:
        logger.trace(pickler, "T7: %s", obj)
        pickler.save_reduce(type, (NotImplemented,), obj=obj)
        logger.trace(pickler, "# T7")
    elif obj is EllipsisType:
        logger.trace(pickler, "T7: %s", obj)
        pickler.save_reduce(type, (Ellipsis,), obj=obj)
        logger.trace(pickler, "# T7")
    elif obj is EnumMeta:
        logger.trace(pickler, "T7: %s", obj)
        pickler.write(GLOBAL + b'enum\nEnumMeta\n')
        logger.trace(pickler, "# T7")
    elif obj is ExceptHookArgsType: #NOTE: must be after NoneType for pypy
        logger.trace(pickler, "T7: %s", obj)
        pickler.write(GLOBAL + b'threading\nExceptHookArgs\n')
        logger.trace(pickler, "# T7")

    else:
        _byref = getattr(pickler, '_byref', None)
        obj_recursive = id(obj) in getattr(pickler, '_postproc', ())
        incorrectly_named = not _locate_function(obj, pickler)
        if not _byref and not obj_recursive and incorrectly_named: # not a function, but the name was held over
            if postproc_list is None:
                postproc_list = []

            # thanks to Tom Stepleton pointing out pickler._session unneeded
            logger.trace(pickler, "T2: %s", obj)
            _dict, attrs = _get_typedict_type(obj, obj.__dict__.copy(), None, postproc_list) # copy dict proxy to a dict

            #print (_dict)
            #print ("%s\n%s" % (type(obj), obj.__name__))
            #print ("%s\n%s" % (obj.__bases__, obj.__dict__))
            slots = _dict.get('__slots__', ())
            if type(slots) == str:
                # __slots__ accepts a single string
                slots = (slots,)

            # slot values live on instances, not in the class dict
            for name in slots:
                _dict.pop(name, None)

            if isinstance(obj, abc.ABCMeta):
                # strip ABC caches and queue virtual-subclass re-registration
                logger.trace(pickler, "ABC: %s", obj)
                _dict, attrs = _get_typedict_abc(obj, _dict, attrs, postproc_list)
                logger.trace(pickler, "# ABC")

            qualname = getattr(obj, '__qualname__', None)
            if attrs is not None:
                for k, v in attrs.items():
                    postproc_list.append((setattr, (obj, k, v)))
                # TODO: Consider using the state argument to save_reduce?
            if qualname is not None:
                postproc_list.append((setattr, (obj, '__qualname__', qualname)))

            if not hasattr(obj, '__orig_bases__'):
                _save_with_postproc(pickler, (_create_type, (
                    type(obj), obj.__name__, obj.__bases__, _dict
                )), obj=obj, postproc_list=postproc_list)
            else:
                # This case will always work, but might be overkill.
                _metadict = {
                    'metaclass': type(obj)
                }

                if _dict:
                    _dict_update = PartialType(_setitems, source=_dict)
                else:
                    _dict_update = None

                _save_with_postproc(pickler, (new_class, (
                    obj.__name__, obj.__orig_bases__, _metadict, _dict_update
                )), obj=obj, postproc_list=postproc_list)
            logger.trace(pickler, "# T2")
        else:
            obj_name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
            logger.trace(pickler, "T4: %s", obj)
            if incorrectly_named:
                warnings.warn(
                    "Cannot locate reference to %r." % (obj,),
                    PicklingWarning,
                    stacklevel=3,
                )
            if obj_recursive:
                warnings.warn(
                    "Cannot pickle %r: %s.%s has recursive self-references that "
                    "trigger a RecursionError." % (obj, obj.__module__, obj_name),
                    PicklingWarning,
                    stacklevel=3,
                )
            #print (obj.__dict__)
            #print ("%s\n%s" % (type(obj), obj.__name__))
            #print ("%s\n%s" % (obj.__bases__, obj.__dict__))
            StockPickler.save_global(pickler, obj, name=obj_name)
            logger.trace(pickler, "# T4")
    return

1929 

@register(property)
@register(abc.abstractproperty)
def save_property(pickler, obj):
    """Pickle a property from its getter/setter/deleter and docstring."""
    logger.trace(pickler, "Pr: %s", obj)
    accessors = (obj.fget, obj.fset, obj.fdel, obj.__doc__)
    pickler.save_reduce(type(obj), accessors, obj=obj)
    logger.trace(pickler, "# Pr")

1937 

@register(staticmethod)
@register(classmethod)
@register(abc.abstractstaticmethod)
@register(abc.abstractclassmethod)
def save_classmethod(pickler, obj):
    """Pickle static/class method wrappers by rewrapping their raw function."""
    logger.trace(pickler, "Cm: %s", obj)
    wrapped = obj.__func__
    pickler.save_reduce(type(obj), (wrapped,), obj=obj)
    logger.trace(pickler, "# Cm")

1956 

@register(FunctionType)
def save_function(pickler, obj):
    """Pickle a function, by value or by reference.

    Trace tags:
      F1 -- unlocatable functions, pickled by value via _create_function
            (code, globals, name, defaults, closure, plus state)
      F2 -- locatable functions, pickled by reference with save_global
      F3 -- PyPy builtin-function special case, recovered from its parent
    """
    if not _locate_function(obj, pickler):
        if type(obj.__code__) is not CodeType:
            # Some PyPy builtin functions have no module name, and thus are not
            # able to be located
            module_name = getattr(obj, '__module__', None)
            if module_name is None:
                module_name = __builtin__.__name__
            module = _import_module(module_name, safe=True)
            _pypy_builtin = False
            try:
                found, _ = _getattribute(module, obj.__qualname__)
                if getattr(found, '__func__', None) is obj:
                    _pypy_builtin = True
            except AttributeError:
                pass

            if _pypy_builtin:
                logger.trace(pickler, "F3: %s", obj)
                # re-derive the raw function from its bound parent at load time
                pickler.save_reduce(getattr, (found, '__func__'), obj=obj)
                logger.trace(pickler, "# F3")
                return

        logger.trace(pickler, "F1: %s", obj)
        _recurse = getattr(pickler, '_recurse', None)
        _postproc = getattr(pickler, '_postproc', None)
        _main_modified = getattr(pickler, '_main_modified', None)
        _original_main = getattr(pickler, '_original_main', __builtin__)#'None'
        postproc_list = []
        if _recurse:
            # recurse to get all globals referred to by obj
            from .detect import globalvars
            globs_copy = globalvars(obj, recurse=True, builtin=True)

            # Add the name of the module to the globs dictionary to prevent
            # the duplication of the dictionary. Pickle the unpopulated
            # globals dictionary and set the remaining items after the function
            # is created to correctly handle recursion.
            globs = {'__name__': obj.__module__}
        else:
            globs_copy = obj.__globals__

            # If the globals is the __dict__ from the module being saved as a
            # session, substitute it by the dictionary being actually saved.
            if _main_modified and globs_copy is _original_main.__dict__:
                globs_copy = getattr(pickler, '_main', _original_main).__dict__
                globs = globs_copy
            # If the globals is a module __dict__, do not save it in the pickle.
            elif globs_copy is not None and obj.__module__ is not None and \
                    getattr(_import_module(obj.__module__, True), '__dict__', None) is globs_copy:
                globs = globs_copy
            else:
                globs = {'__name__': obj.__module__}

        if globs_copy is not None and globs is not globs_copy:
            # In the case that the globals are copied, we need to ensure that
            # the globals dictionary is updated when all objects in the
            # dictionary are already created.
            glob_ids = {id(g) for g in globs_copy.values()}
            for stack_element in _postproc:
                if stack_element in glob_ids:
                    # attach the update to the postproc entry of a global
                    # currently being pickled further up the stack
                    _postproc[stack_element].append((_setitems, (globs, globs_copy)))
                    break
            else:
                postproc_list.append((_setitems, (globs, globs_copy)))

        closure = obj.__closure__
        state_dict = {}
        # optional function attributes travel in the state dict
        for fattrname in ('__doc__', '__kwdefaults__', '__annotations__'):
            fattr = getattr(obj, fattrname, None)
            if fattr is not None:
                state_dict[fattrname] = fattr
        if obj.__qualname__ != obj.__name__:
            state_dict['__qualname__'] = obj.__qualname__
        if '__name__' not in globs or obj.__module__ != globs['__name__']:
            state_dict['__module__'] = obj.__module__

        state = obj.__dict__
        if type(state) is not dict:
            # a non-dict __dict__ is carried inside state_dict instead
            state_dict['__dict__'] = state
            state = None
        if state_dict:
            state = state, state_dict

        _save_with_postproc(pickler, (_create_function, (
            obj.__code__, globs, obj.__name__, obj.__defaults__,
            closure
        ), state), obj=obj, postproc_list=postproc_list)

        # Lift closure cell update to earliest function (#458)
        if _postproc:
            topmost_postproc = next(iter(_postproc.values()), None)
            if closure and topmost_postproc:
                for cell in closure:
                    possible_postproc = (setattr, (cell, 'cell_contents', obj))
                    try:
                        topmost_postproc.remove(possible_postproc)
                    except ValueError:
                        continue

                    # Change the value of the cell
                    pickler.save_reduce(*possible_postproc)
                    # pop None created by calling preprocessing step off stack
                    pickler.write(POP)

        logger.trace(pickler, "# F1")
    else:
        logger.trace(pickler, "F2: %s", obj)
        name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
        StockPickler.save_global(pickler, obj, name=name)
        logger.trace(pickler, "# F2")
    return

2070 

if HAS_CTYPES and hasattr(ctypes, 'pythonapi'):
    # Bind the CPython PyCapsule C-API functions through ctypes so capsules
    # can be inspected (name/pointer/context/destructor) and rebuilt.  Each
    # prototype mirrors the C signature of the corresponding API function.
    _PyCapsule_New = ctypes.pythonapi.PyCapsule_New
    _PyCapsule_New.argtypes = (ctypes.c_void_p, ctypes.c_char_p, ctypes.c_void_p)
    _PyCapsule_New.restype = ctypes.py_object
    _PyCapsule_GetPointer = ctypes.pythonapi.PyCapsule_GetPointer
    _PyCapsule_GetPointer.argtypes = (ctypes.py_object, ctypes.c_char_p)
    _PyCapsule_GetPointer.restype = ctypes.c_void_p
    _PyCapsule_GetDestructor = ctypes.pythonapi.PyCapsule_GetDestructor
    _PyCapsule_GetDestructor.argtypes = (ctypes.py_object,)
    _PyCapsule_GetDestructor.restype = ctypes.c_void_p
    _PyCapsule_GetContext = ctypes.pythonapi.PyCapsule_GetContext
    _PyCapsule_GetContext.argtypes = (ctypes.py_object,)
    _PyCapsule_GetContext.restype = ctypes.c_void_p
    _PyCapsule_GetName = ctypes.pythonapi.PyCapsule_GetName
    _PyCapsule_GetName.argtypes = (ctypes.py_object,)
    _PyCapsule_GetName.restype = ctypes.c_char_p
    _PyCapsule_IsValid = ctypes.pythonapi.PyCapsule_IsValid
    _PyCapsule_IsValid.argtypes = (ctypes.py_object, ctypes.c_char_p)
    _PyCapsule_IsValid.restype = ctypes.c_bool
    _PyCapsule_SetContext = ctypes.pythonapi.PyCapsule_SetContext
    _PyCapsule_SetContext.argtypes = (ctypes.py_object, ctypes.c_void_p)
    _PyCapsule_SetDestructor = ctypes.pythonapi.PyCapsule_SetDestructor
    _PyCapsule_SetDestructor.argtypes = (ctypes.py_object, ctypes.c_void_p)
    _PyCapsule_SetName = ctypes.pythonapi.PyCapsule_SetName
    _PyCapsule_SetName.argtypes = (ctypes.py_object, ctypes.c_char_p)
    _PyCapsule_SetPointer = ctypes.pythonapi.PyCapsule_SetPointer
    _PyCapsule_SetPointer.argtypes = (ctypes.py_object, ctypes.c_void_p)
    #from _socket import CAPI as _testcapsule
    # Create a throwaway capsule purely to obtain the (unexposed) capsule type.
    _testcapsule_name = b'dill._dill._testcapsule'
    _testcapsule = _PyCapsule_New(
        ctypes.cast(_PyCapsule_New, ctypes.c_void_p),
        ctypes.c_char_p(_testcapsule_name),
        None
    )
    PyCapsuleType = type(_testcapsule)
    @register(PyCapsuleType)
    def save_capsule(pickler, obj):
        """Pickle a PyCapsule by its raw pointer, name, context and destructor.

        NOTE: the pointed-to C data is NOT pickled, only the addresses.
        """
        logger.trace(pickler, "Cap: %s", obj)
        name = _PyCapsule_GetName(obj)
        #warnings.warn('Pickling a PyCapsule (%s) does not pickle any C data structures and could cause segmentation faults or other memory errors when unpickling.' % (name,), PicklingWarning)
        pointer = _PyCapsule_GetPointer(obj, name)
        context = _PyCapsule_GetContext(obj)
        destructor = _PyCapsule_GetDestructor(obj)
        pickler.save_reduce(_create_capsule, (pointer, name, context, destructor), obj=obj)
        logger.trace(pickler, "# Cap")
    _incedental_reverse_typemap['PyCapsuleType'] = PyCapsuleType
    _reverse_typemap['PyCapsuleType'] = PyCapsuleType
    _incedental_types.add(PyCapsuleType)
else:
    # no ctypes C-API access (e.g. restricted builds): capsules unsupported
    _testcapsule = None

2121 

2122 

2123############################# 

2124# A quick fix for issue #500 

2125# This should be removed when a better solution is found. 

2126 

if hasattr(dataclasses, "_HAS_DEFAULT_FACTORY_CLASS"):
    @register(dataclasses._HAS_DEFAULT_FACTORY_CLASS)
    def save_dataclasses_HAS_DEFAULT_FACTORY_CLASS(pickler, obj):
        """Pickle the dataclasses._HAS_DEFAULT_FACTORY sentinel by reference."""
        logger.trace(pickler, "DcHDF: %s", obj)
        payload = GLOBAL + b"dataclasses\n_HAS_DEFAULT_FACTORY\n"
        pickler.write(payload)
        logger.trace(pickler, "# DcHDF")

if hasattr(dataclasses, "MISSING"):
    @register(type(dataclasses.MISSING))
    def save_dataclasses_MISSING_TYPE(pickler, obj):
        """Pickle the dataclasses.MISSING sentinel by reference."""
        logger.trace(pickler, "DcM: %s", obj)
        payload = GLOBAL + b"dataclasses\nMISSING\n"
        pickler.write(payload)
        logger.trace(pickler, "# DcM")

if hasattr(dataclasses, "KW_ONLY"):
    @register(type(dataclasses.KW_ONLY))
    def save_dataclasses_KW_ONLY_TYPE(pickler, obj):
        """Pickle the dataclasses.KW_ONLY sentinel by reference."""
        logger.trace(pickler, "DcKWO: %s", obj)
        payload = GLOBAL + b"dataclasses\nKW_ONLY\n"
        pickler.write(payload)
        logger.trace(pickler, "# DcKWO")

if hasattr(dataclasses, "_FIELD_BASE"):
    @register(dataclasses._FIELD_BASE)
    def save_dataclasses_FIELD_BASE(pickler, obj):
        """Pickle dataclasses field-kind sentinels (_FIELD, ...) by their name."""
        logger.trace(pickler, "DcFB: %s", obj)
        payload = GLOBAL + b"dataclasses\n" + obj.name.encode() + b"\n"
        pickler.write(payload)
        logger.trace(pickler, "# DcFB")

2154 

2155############################# 

2156 

2157# quick sanity checking 

def pickles(obj,exact=False,safe=False,**kwds):
    """
    Quick check if object pickles with dill.

    If *exact=True* then an equality test is done to check if the reconstructed
    object matches the original object.

    If *safe=True* then any exception will raised in copy signal that the
    object is not picklable, otherwise only pickling errors will be trapped.

    Additional keyword arguments are as :func:`dumps` and :func:`loads`.
    """
    if safe: exceptions = (Exception,) # RuntimeError, ValueError
    else:
        exceptions = (TypeError, AssertionError, NotImplementedError, PicklingError, UnpicklingError)
    try:
        # round-trip the object through dill
        pik = copy(obj, **kwds)
        #FIXME: should check types match first, then check content if "exact"
        try:
            #FIXME: should be "(pik == obj).all()" for numpy comparison, though that'll fail if shapes differ
            result = bool(pik.all() == obj.all())
        except (AttributeError, TypeError):
            # Fix: suppress comparison warnings with a scoped filter instead of
            # mutating the global warnings.filters list; the old
            # `filterwarnings('ignore')` + `del warnings.filters[0]` pair could
            # leak the filter if the comparison raised. #FIXME: be specific
            with warnings.catch_warnings():
                warnings.simplefilter('ignore')
                result = pik == obj
        if hasattr(result, 'toarray'): # for unusual types like sparse matrix
            result = result.toarray().all()
        if result: return True
        if not exact:
            result = type(pik) == type(obj)
            if result: return result
            # class instances might have been dumped with byref=False
            return repr(type(pik)) == repr(type(obj)) #XXX: InstanceType?
        return False
    except exceptions:
        return False

2194 

def check(obj, *args, **kwds):
    """
    Check pickling of an object across another process.

    *python* is the path to the python interpreter (defaults to sys.executable)

    Set *verbose=True* to print the unpickled object in the other process.

    Additional keyword arguments are as :func:`dumps` and :func:`loads`.
    """
    # == undocumented ==
    # python -- the string path or executable name of the selected python
    # verbose -- if True, be verbose about printing warning messages
    # all other args and kwds are passed to dill.dumps #FIXME: ignore on load
    verbose = kwds.pop('verbose', False)
    python = kwds.pop('python', None)
    if python is None:
        import sys
        python = sys.executable
    # type check
    isinstance(python, str)
    import subprocess
    fail = True
    try:
        _obj = dumps(obj, *args, **kwds)
        fail = False
    finally:
        if fail and verbose:
            print("DUMP FAILED")
    # Fix: build the command as an argument list so interpreter paths that
    # contain spaces work; the old code joined a string and split it again.
    # Could also process the 'ignore' keyword on load:
    # ignore = kwds.pop('ignore', None)
    # unpickle = "dill.loads(%s, ignore=%s)"%(repr(_obj), repr(ignore))
    cmd = [python, "-c", "import dill; print(dill.loads(%s))" % repr(_obj)]
    msg = "SUCCESS" if not subprocess.call(cmd) else "LOAD FAILED"
    if verbose:
        print(msg)
    return

2235 

2236# use to protect against missing attributes 

def is_dill(pickler, child=None):
    "check the dill-ness of your pickler"
    inspect_lineage = child is not False and hasattr(pickler.__class__, 'mro')
    if inspect_lineage:
        # a dill Pickler (or subclass) somewhere in the class hierarchy?
        return Pickler in pickler.__class__.mro()
    # fall back to a loose check on the module name
    return 'dill' in pickler.__module__

2242 

def _extend():
    """extend pickle with all of dill's registered types"""
    # need to have pickle not choke on _main_module? use is_dill(pickler)
    for t, func in Pickler.dispatch.items():
        try:
            StockPickler.dispatch[t] = func
        except Exception: #TypeError, PicklingError, UnpicklingError
            # Fix: the original referenced the undefined name `pickler` here,
            # so a failed registration raised NameError instead of logging.
            # There is no pickler instance in this scope; pass None so the
            # trace adapter falls back to a plain log record.
            logger.trace(None, "skip: %s", t)
    return

2252 

# Remove the diff machinery from the module namespace (the diff branch in
# save_module above is disabled; `use_diff` is presumably import-time
# configuration -- verify against the imports at the top of the file).
del diff, _use_diff, use_diff

2254 

2255# EOF