Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/dill/_dill.py: 29%

1270 statements  

« prev     ^ index     » next       coverage.py v7.0.1, created at 2022-12-25 06:11 +0000

1# -*- coding: utf-8 -*- 

2# 

3# Author: Mike McKerns (mmckerns @caltech and @uqfoundation) 

4# Copyright (c) 2008-2015 California Institute of Technology. 

5# Copyright (c) 2016-2022 The Uncertainty Quantification Foundation. 

6# License: 3-clause BSD. The full license text is available at: 

7# - https://github.com/uqfoundation/dill/blob/master/LICENSE 

8""" 

9dill: a utility for serialization of python objects 

10 

11Based on code written by Oren Tirosh and Armin Ronacher. 

12Extended to a (near) full set of the builtin types (in types module), 

13and coded to the pickle interface, by <mmckerns@caltech.edu>. 

14Initial port to python3 by Jonathan Dobson, continued by mmckerns. 

15Test against "all" python types (Std. Lib. CH 1-15 @ 2.7) by mmckerns. 

16Test against CH16+ Std. Lib. ... TBD. 

17""" 

18__all__ = [ 

19 'Pickler','Unpickler', 

20 'check','copy','dump','dumps','load','loads','pickle','pickles','register', 

21 'DEFAULT_PROTOCOL','HIGHEST_PROTOCOL','CONTENTS_FMODE','FILE_FMODE','HANDLE_FMODE', 

22 'PickleError','PickleWarning','PicklingError','PicklingWarning','UnpicklingError', 

23 'UnpicklingWarning', 

24] 

25 

26__module__ = 'dill' 

27 

28import warnings 

29from .logger import adapter as logger 

30from .logger import trace as _trace 

31 

32import os 

33import sys 

34diff = None 

35_use_diff = False 

36OLD38 = (sys.hexversion < 0x3080000) 

37OLD39 = (sys.hexversion < 0x3090000) 

38OLD310 = (sys.hexversion < 0x30a0000) 

39#XXX: get types from .objtypes ? 

40import builtins as __builtin__ 

41from pickle import _Pickler as StockPickler, Unpickler as StockUnpickler 

42from _thread import LockType 

43from _thread import RLock as RLockType 

44#from io import IOBase 

45from types import CodeType, FunctionType, MethodType, GeneratorType, \ 

46 TracebackType, FrameType, ModuleType, BuiltinMethodType 

47BufferType = memoryview #XXX: unregistered 

48ClassType = type # no 'old-style' classes 

49EllipsisType = type(Ellipsis) 

50#FileType = IOBase 

51NotImplementedType = type(NotImplemented) 

52SliceType = slice 

53TypeType = type # 'new-style' classes #XXX: unregistered 

54XRangeType = range 

55from types import MappingProxyType as DictProxyType 

56from pickle import DEFAULT_PROTOCOL, HIGHEST_PROTOCOL, PickleError, PicklingError, UnpicklingError 

57import __main__ as _main_module 

58import marshal 

59import gc 

60# import zlib 

61from weakref import ReferenceType, ProxyType, CallableProxyType 

62from collections import OrderedDict 

63from functools import partial 

64from operator import itemgetter, attrgetter 

65GENERATOR_FAIL = False 

66import importlib.machinery 

67EXTENSION_SUFFIXES = tuple(importlib.machinery.EXTENSION_SUFFIXES) 

68try: 

69 import ctypes 

70 HAS_CTYPES = True 

71 # if using `pypy`, pythonapi is not found 

72 IS_PYPY = not hasattr(ctypes, 'pythonapi') 

73except ImportError: 

74 HAS_CTYPES = False 

75 IS_PYPY = False 

76NumpyUfuncType = None 

77NumpyDType = None 

78NumpyArrayType = None 

79try: 

80 if not importlib.machinery.PathFinder().find_spec('numpy'): 

81 raise ImportError("No module named 'numpy'") 

82 NumpyUfuncType = True 

83 NumpyDType = True 

84 NumpyArrayType = True 

85except ImportError: 

86 pass 

87def __hook__(): 

88 global NumpyArrayType, NumpyDType, NumpyUfuncType 

89 from numpy import ufunc as NumpyUfuncType 

90 from numpy import ndarray as NumpyArrayType 

91 from numpy import dtype as NumpyDType 

92 return True 

if NumpyArrayType: # then has numpy
    def ndarraysubclassinstance(obj_type):
        # True only for ndarray (sub)classes that did NOT override reduction;
        # those are safe to pickle with dill's generic array handler.
        if all((c.__module__, c.__name__) != ('numpy', 'ndarray') for c in obj_type.__mro__):
            return False
        # anything below here is a numpy array (or subclass) instance
        __hook__() # import numpy (so the following works!!!)
        # verify that __reduce__ has not been overridden
        if obj_type.__reduce_ex__ is not NumpyArrayType.__reduce_ex__ \
                or obj_type.__reduce__ is not NumpyArrayType.__reduce__:
            return False
        return True
    def numpyufunc(obj_type):
        # True when numpy.ufunc appears anywhere in the type's MRO.
        return any((c.__module__, c.__name__) == ('numpy', 'ufunc') for c in obj_type.__mro__)
    def numpydtype(obj_type):
        # True when the type derives from numpy.dtype (checked by name so
        # numpy need not be imported yet).
        if all((c.__module__, c.__name__) != ('numpy', 'dtype') for c in obj_type.__mro__):
            return False
        # anything below here is a numpy dtype
        __hook__() # import numpy (so the following works!!!)
        return obj_type is type(NumpyDType) # handles subclasses
else:
    # numpy is unavailable: every probe is trivially False
    def ndarraysubclassinstance(obj): return False
    def numpyufunc(obj): return False
    def numpydtype(obj): return False

116 

117from types import GetSetDescriptorType, ClassMethodDescriptorType, \ 

118 WrapperDescriptorType, MethodDescriptorType, MemberDescriptorType, \ 

119 MethodWrapperType #XXX: unused 

120 

121# make sure to add these 'hand-built' types to _typemap 

122CellType = type((lambda x: lambda y: x)(0).__closure__[0]) 

123PartialType = type(partial(int, base=2)) 

124SuperType = type(super(Exception, TypeError())) 

125ItemGetterType = type(itemgetter(0)) 

126AttrGetterType = type(attrgetter('__repr__')) 

127 

128try: 

129 from functools import _lru_cache_wrapper as LRUCacheType 

130except ImportError: 

131 LRUCacheType = None 

132 

133if not isinstance(LRUCacheType, type): 

134 LRUCacheType = None 

135 

def get_file_type(*args, **kwargs):
    """Return the concrete type of a handle opened on os.devnull.

    Positional/keyword arguments are forwarded to the opener. An
    alternative opener (e.g. _pyio.open) may be supplied through the
    'open' keyword; it defaults to the builtin open.
    """
    opener = kwargs.pop("open", __builtin__.open)
    handle = opener(os.devnull, *args, **kwargs)
    try:
        return type(handle)
    finally:
        handle.close()

142 

143FileType = get_file_type('rb', buffering=0) 

144TextWrapperType = get_file_type('r', buffering=-1) 

145BufferedRandomType = get_file_type('r+b', buffering=-1) 

146BufferedReaderType = get_file_type('rb', buffering=-1) 

147BufferedWriterType = get_file_type('wb', buffering=-1) 

148try: 

149 from _pyio import open as _open 

150 PyTextWrapperType = get_file_type('r', buffering=-1, open=_open) 

151 PyBufferedRandomType = get_file_type('r+b', buffering=-1, open=_open) 

152 PyBufferedReaderType = get_file_type('rb', buffering=-1, open=_open) 

153 PyBufferedWriterType = get_file_type('wb', buffering=-1, open=_open) 

154except ImportError: 

155 PyTextWrapperType = PyBufferedRandomType = PyBufferedReaderType = PyBufferedWriterType = None 

156from io import BytesIO as StringIO 

157InputType = OutputType = None 

158from socket import socket as SocketType 

159#FIXME: additionally calls ForkingPickler.register several times 

160from multiprocessing.reduction import _reduce_socket as reduce_socket 

161try: 

162 IS_IPYTHON = __IPYTHON__ # is True 

163 ExitType = None # IPython.core.autocall.ExitAutocall 

164 singletontypes = ['exit', 'quit', 'get_ipython'] 

165except NameError: 

166 IS_IPYTHON = False 

167 try: ExitType = type(exit) # apparently 'exit' can be removed 

168 except NameError: ExitType = None 

169 singletontypes = [] 

170 

171import inspect 

172import dataclasses 

173import typing 

174 

175from pickle import GLOBAL 

176 

177 

178### Shims for different versions of Python and dill 

class Sentinel(object):
    """
    Create a unique sentinel object that is pickled as a constant.
    """
    def __init__(self, name, module_name=None):
        self.name = name
        if module_name is not None:
            self.__module__ = module_name # pragma: no cover
        else:
            # Attribute the sentinel to the module of the code that built it.
            # NOTE: the frame walk must stay directly in __init__ so f_back
            # is the caller's frame.
            caller = inspect.currentframe().f_back
            self.__module__ = caller.f_globals['__name__']
    def __repr__(self):
        return '%s.%s' % (self.__module__, self.name) # pragma: no cover
    def __copy__(self):
        # a sentinel is a singleton: copying yields itself
        return self # pragma: no cover
    def __deepcopy__(self, memo):
        return self # pragma: no cover
    def __reduce__(self):
        # pickle by bare name, so unpickling resolves the module constant
        return self.name
    def __reduce_ex__(self, protocol):
        return self.name

200 

201from . import _shims 

202from ._shims import Reduce, Getattr 

203 

204### File modes 

205#: Pickles the file handle, preserving mode. The position of the unpickled 

206#: object is as for a new file handle. 

207HANDLE_FMODE = 0 

208#: Pickles the file contents, creating a new file if on load the file does 

209#: not exist. The position = min(pickled position, EOF) and mode is chosen 

210#: as such that "best" preserves behavior of the original file. 

211CONTENTS_FMODE = 1 

212#: Pickles the entire file (handle and contents), preserving mode and position. 

213FILE_FMODE = 2 

214 

215### Shorthands (modified from python2.5/lib/pickle.py) 

def copy(obj, *args, **kwds):
    """
    Use pickling to 'copy' an object (i.e. `loads(dumps(obj))`).

    See :func:`dumps` and :func:`loads` for keyword arguments.
    """
    # 'ignore' belongs to the unpickling side; everything else goes to dumps.
    default_ignore = Unpickler.settings['ignore']
    ignore = kwds.pop('ignore', default_ignore)
    pickled = dumps(obj, *args, **kwds)
    return loads(pickled, ignore=ignore)

224 

def dump(obj, file, protocol=None, byref=None, fmode=None, recurse=None, **kwds):#, strictio=None):
    """
    Pickle an object to a file.

    See :func:`dumps` for keyword arguments.
    """
    from .settings import settings
    # fall back to the configured default protocol when none is given
    if protocol is None:
        protocol = settings['protocol']
    else:
        protocol = int(protocol)
    # merge the explicit dill options into the pass-through keywords
    pickler_kwds = dict(kwds, byref=byref, fmode=fmode, recurse=recurse)
    Pickler(file, protocol, **pickler_kwds).dump(obj)
    return

237 

def dumps(obj, protocol=None, byref=None, fmode=None, recurse=None, **kwds):#, strictio=None):
    """
    Pickle an object to a string.

    *protocol* is the pickler protocol, as defined for Python *pickle*.

    If *byref=True*, then dill behaves a lot more like pickle as certain
    objects (like modules) are pickled by reference as opposed to attempting
    to pickle the object itself.

    If *recurse=True*, then objects referred to in the global dictionary
    are recursively traced and pickled, instead of the default behavior
    of attempting to store the entire global dictionary. This is needed for
    functions defined via *exec()*.

    *fmode* (:const:`HANDLE_FMODE`, :const:`CONTENTS_FMODE`,
    or :const:`FILE_FMODE`) indicates how file handles will be pickled.
    For example, when pickling a data file handle for transfer to a remote
    compute service, *FILE_FMODE* will include the file contents in the
    pickle and cursor position so that a remote method can operate
    transparently on an object with an open file handle.

    Default values for keyword arguments can be set in :mod:`dill.settings`.
    """
    # pickle into an in-memory buffer and hand back its bytes
    buffer = StringIO()
    dump(obj, buffer, protocol, byref, fmode, recurse, **kwds)#, strictio)
    return buffer.getvalue()

265 

def load(file, ignore=None, **kwds):
    """
    Unpickle an object from a file.

    See :func:`loads` for keyword arguments.
    """
    unpickler = Unpickler(file, ignore=ignore, **kwds)
    return unpickler.load()

273 

def loads(str, ignore=None, **kwds):
    """
    Unpickle an object from a string.

    If *ignore=False* then objects whose class is defined in the module
    *__main__* are updated to reference the existing class in *__main__*,
    otherwise they are left to refer to the reconstructed type, which may
    be different.

    Default values for keyword arguments can be set in :mod:`dill.settings`.
    """
    # NOTE: the parameter is named 'str' (shadowing the builtin) for
    # backward API compatibility; it holds the pickled bytes.
    buffer = StringIO(str)
    return load(buffer, ignore, **kwds)

287 

288# def dumpzs(obj, protocol=None): 

289# """pickle an object to a compressed string""" 

290# return zlib.compress(dumps(obj, protocol)) 

291 

292# def loadzs(str): 

293# """unpickle an object from a compressed string""" 

294# return loads(zlib.decompress(str)) 

295 

296### End: Shorthands ### 

297 

class MetaCatchingDict(dict):
    """A dispatch dict with a fallback for metaclasses.

    A missing key that is itself a class of classes (a subclass of
    ``type``) resolves to the generic ``save_type`` handler; any other
    missing key raises KeyError as usual.
    """
    def get(self, key, default=None):
        try:
            value = self[key]
        except KeyError:
            value = default
        return value

    def __missing__(self, key):
        if not issubclass(key, type):
            raise KeyError()
        return save_type

310 

class PickleWarning(Warning, PickleError):
    """Base warning for dill; also a PickleError, so callers that catch
    pickle errors will see it too."""
    pass

313 

class PicklingWarning(PickleWarning, PicklingError):
    """Warning issued while pickling; doubles as a PicklingError."""
    pass

316 

class UnpicklingWarning(PickleWarning, UnpicklingError):
    """Warning issued while unpickling; doubles as an UnpicklingError."""
    pass

319 

320### Extend the Picklers 

class Pickler(StockPickler):
    """python's Pickler extended to interpreter sessions"""
    # dispatch falls back to save_type for metaclass keys (MetaCatchingDict)
    dispatch = MetaCatchingDict(StockPickler.dispatch.copy())
    _session = False
    from .settings import settings

    def __init__(self, file, *args, **kwds):
        # Pull the dill-specific options out of kwds before handing the
        # remainder to the stock pickler; unset options fall back to
        # the values configured in dill.settings.
        settings = Pickler.settings
        _byref = kwds.pop('byref', None)
        #_strictio = kwds.pop('strictio', None)
        _fmode = kwds.pop('fmode', None)
        _recurse = kwds.pop('recurse', None)
        StockPickler.__init__(self, file, *args, **kwds)
        self._main = _main_module
        self._diff_cache = {}
        self._byref = settings['byref'] if _byref is None else _byref
        self._strictio = False #_strictio
        self._fmode = settings['fmode'] if _fmode is None else _fmode
        self._recurse = settings['recurse'] if _recurse is None else _recurse
        self._postproc = OrderedDict()
        self._file = file

    def save(self, obj, save_persistent_id=True):
        # register if the object is a numpy ufunc
        # thanks to Paul Kienzle for pointing out ufuncs didn't pickle
        obj_type = type(obj)
        if NumpyArrayType and not (obj_type is type or obj_type in Pickler.dispatch):
            # Lazily install numpy handlers the first time an unregistered
            # numpy type is saved (numpy may not even be imported yet).
            if NumpyUfuncType and numpyufunc(obj_type):
                @register(obj_type)
                def save_numpy_ufunc(pickler, obj):
                    logger.trace(pickler, "Nu: %s", obj)
                    name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
                    StockPickler.save_global(pickler, obj, name=name)
                    logger.trace(pickler, "# Nu")
                    return
                # NOTE: the above 'save' performs like:
                #   import copy_reg
                #   def udump(f): return f.__name__
                #   def uload(name): return getattr(numpy, name)
                #   copy_reg.pickle(NumpyUfuncType, udump, uload)
            # register if the object is a numpy dtype
            if NumpyDType and numpydtype(obj_type):
                @register(obj_type)
                def save_numpy_dtype(pickler, obj):
                    logger.trace(pickler, "Dt: %s", obj)
                    pickler.save_reduce(_create_dtypemeta, (obj.type,), obj=obj)
                    logger.trace(pickler, "# Dt")
                    return
                # NOTE: the above 'save' performs like:
                #   import copy_reg
                #   def uload(name): return type(NumpyDType(name))
                #   def udump(f): return uload, (f.type,)
                #   copy_reg.pickle(NumpyDTypeType, udump, uload)
            # register if the object is a subclassed numpy array instance
            if NumpyArrayType and ndarraysubclassinstance(obj_type):
                @register(obj_type)
                def save_numpy_array(pickler, obj):
                    logger.trace(pickler, "Nu: (%s, %s)", obj.shape, obj.dtype)
                    npdict = getattr(obj, '__dict__', None)
                    f, args, state = obj.__reduce__()
                    pickler.save_reduce(_create_array, (f,args,state,npdict), obj=obj)
                    logger.trace(pickler, "# Nu")
                    return
        # end hack
        if GENERATOR_FAIL and type(obj) == GeneratorType:
            msg = "Can't pickle %s: attribute lookup builtins.generator failed" % GeneratorType
            raise PicklingError(msg)
        StockPickler.save(self, obj, save_persistent_id)

    save.__doc__ = StockPickler.save.__doc__

    def dump(self, obj): #NOTE: if settings change, need to update attributes
        logger.trace_setup(self)
        StockPickler.dump(self, obj)

    dump.__doc__ = StockPickler.dump.__doc__

397 

class Unpickler(StockUnpickler):
    """python's Unpickler extended to interpreter sessions and more types"""
    from .settings import settings
    _session = False

    def find_class(self, module, name):
        # Remap a few special globals before delegating to the stock lookup.
        if (module, name) == ('__builtin__', '__main__'):
            return self._main.__dict__ #XXX: above set w/save_module_dict
        elif (module, name) == ('__builtin__', 'NoneType'):
            return type(None) #XXX: special case: NoneType missing
        # dill.dill is the pre-rename module path for dill._dill
        if module == 'dill.dill': module = 'dill._dill'
        return StockUnpickler.find_class(self, module, name)

    def __init__(self, *args, **kwds):
        # 'ignore' falls back to dill.settings when not given explicitly
        settings = Pickler.settings
        _ignore = kwds.pop('ignore', None)
        StockUnpickler.__init__(self, *args, **kwds)
        self._main = _main_module
        self._ignore = settings['ignore'] if _ignore is None else _ignore

    def load(self): #NOTE: if settings change, need to update attributes
        obj = StockUnpickler.load(self)
        # Unless ignore=True, rebind classes defined in __main__ to the
        # live __main__ so unpickled instances use the current class.
        if type(obj).__module__ == getattr(_main_module, '__name__', '__main__'):
            if not self._ignore:
                # point obj class to main
                try: obj.__class__ = getattr(self._main, type(obj).__name__)
                except (AttributeError,TypeError): pass # defined in a file
        #_main_module.__dict__.update(obj.__dict__) #XXX: should update globals ?
        return obj
    load.__doc__ = StockUnpickler.load.__doc__
    pass

429 

430''' 

431def dispatch_table(): 

432 """get the dispatch table of registered types""" 

433 return Pickler.dispatch 

434''' 

435 

436pickle_dispatch_copy = StockPickler.dispatch.copy() 

437 

def pickle(t, func):
    """expose dispatch table for user-created extensions"""
    # install the user handler directly into dill's dispatch table
    dispatch = Pickler.dispatch
    dispatch[t] = func
    return

442 

def register(t):
    """register type to Pickler's dispatch table """
    def _bind(func):
        # record the handler for type t, then hand the function back
        # unchanged so this works as a decorator
        Pickler.dispatch[t] = func
        return func
    return _bind

449 

def _revert_extension():
    """Drop dill-registered handlers from stock pickle's dispatch table.

    Any entry whose handler was defined in this module is removed; if the
    stock pickler originally had a handler for that type, it is restored
    from the snapshot taken at import time (pickle_dispatch_copy).
    """
    # NOTE: loop variable renamed from 'type' to avoid shadowing the builtin
    for klass, func in list(StockPickler.dispatch.items()):
        if func.__module__ == __name__:
            del StockPickler.dispatch[klass]
            # restore the stock handler that dill had overridden, if any
            if klass in pickle_dispatch_copy:
                StockPickler.dispatch[klass] = pickle_dispatch_copy[klass]

457 

def use_diff(on=True):
    """
    Reduces size of pickles by only including object which have changed.

    Decreases pickle size but increases CPU time needed.
    Also helps avoid some unpicklable objects.
    MUST be called at start of script, otherwise changes will not be recorded.
    """
    global _use_diff, diff
    _use_diff = on
    # lazily bind the diff module the first time diffing is enabled
    if not (_use_diff and diff is None):
        return
    try:
        from . import diff as d
    except ImportError:
        import diff as d
    diff = d

474 

def _create_typemap():
    """Yield (name, type) pairs for the builtin types reachable from the
    builtins and types module namespaces."""
    import types
    # merge both namespaces; entries from 'types' win on duplicate names
    namespace = dict(__builtin__.__dict__)
    namespace.update(types.__dict__)
    for key, value in namespace.items():
        # keep only proper classes whose home module is 'builtins'
        if type(value) is type and getattr(value, '__module__', None) == 'builtins':
            yield key, value
    return

# name -> type mapping used by _load_type when unpickling
_reverse_typemap = dict(_create_typemap())
# add the 'hand-built' types that don't live in builtins/types
_reverse_typemap.update({
    'PartialType': PartialType,
    'SuperType': SuperType,
    'ItemGetterType': ItemGetterType,
    'AttrGetterType': AttrGetterType,
})
# before 3.8.0a2 CellType is not exposed in the types module
if sys.hexversion < 0x30800a2:
    _reverse_typemap.update({
        'CellType': CellType,
    })

495 

496# "Incidental" implementation specific types. Unpickling these types in another 

497# implementation of Python (PyPy -> CPython) is not guaranteed to work 

498 

499# This dictionary should contain all types that appear in Python implementations 

500# but are not defined in https://docs.python.org/3/library/types.html#standard-interpreter-types 

# probe instance: its view types are registered just below, then deleted
x=OrderedDict()
_incedental_reverse_typemap = {
    'FileType': FileType,
    'BufferedRandomType': BufferedRandomType,
    'BufferedReaderType': BufferedReaderType,
    'BufferedWriterType': BufferedWriterType,
    'TextWrapperType': TextWrapperType,
    'PyBufferedRandomType': PyBufferedRandomType,
    'PyBufferedReaderType': PyBufferedReaderType,
    'PyBufferedWriterType': PyBufferedWriterType,
    'PyTextWrapperType': PyTextWrapperType,
}

# dict and OrderedDict view types, obtained from throwaway instances
_incedental_reverse_typemap.update({
    "DictKeysType": type({}.keys()),
    "DictValuesType": type({}.values()),
    "DictItemsType": type({}.items()),

    "OdictKeysType": type(x.keys()),
    "OdictValuesType": type(x.values()),
    "OdictItemsType": type(x.items()),
})

# optional types detected earlier in this module
if ExitType:
    _incedental_reverse_typemap['ExitType'] = ExitType
if InputType:
    _incedental_reverse_typemap['InputType'] = InputType
    _incedental_reverse_typemap['OutputType'] = OutputType

529 

530''' 

531try: 

532 import symtable 

533 _incedental_reverse_typemap["SymtableEntryType"] = type(symtable.symtable("", "string", "exec")._table) 

534except: #FIXME: fails to pickle 

535 pass 

536 

537if sys.hexversion >= 0x30a00a0: 

538 _incedental_reverse_typemap['LineIteratorType'] = type(compile('3', '', 'eval').co_lines()) 

539''' 

540 

# 3.11.0b0+ exposes an iterator over GenericAlias objects
if sys.hexversion >= 0x30b00b0:
    from types import GenericAlias
    _incedental_reverse_typemap["GenericAliasIteratorType"] = type(iter(GenericAlias(list, (int,))))
    '''
    _incedental_reverse_typemap['PositionsIteratorType'] = type(compile('3', '', 'eval').co_positions())
    '''

# Windows-only registry handle type
try:
    import winreg
    _incedental_reverse_typemap["HKEYType"] = winreg.HKEYType
except ImportError:
    pass

# fold the implementation-specific types into the main reverse map and
# remember which ones they were
_reverse_typemap.update(_incedental_reverse_typemap)
_incedental_types = set(_incedental_reverse_typemap.values())

del x

# forward mapping: type -> dill name
_typemap = dict((v, k) for k, v in _reverse_typemap.items())

560 

def _unmarshal(string):
    """Inverse of marshal.dumps: rebuild an object (typically a code
    object) from its marshal byte string."""
    obj = marshal.loads(string)
    return obj

563 

def _load_type(name):
    """Resolve a dill-assigned type name (e.g. 'PartialType') back to the
    actual type via the module-level reverse typemap."""
    return _reverse_typemap[name]

566 

def _create_type(typeobj, *args):
    """Instantiate *typeobj* (a type or metaclass) with *args*; used to
    rebuild classes and instances on load."""
    instance = typeobj(*args)
    return instance

569 

def _create_function(fcode, fglobals, fname=None, fdefaults=None,
                     fclosure=None, fdict=None, fkwdefaults=None):
    """Rebuild a function object from its components.

    Behaves like types.FunctionType, but additionally restores the
    function's __dict__ (attributes attached after creation) and
    __kwdefaults__, and guarantees __builtins__ is present in its
    globals.
    """
    if not fglobals:
        fglobals = dict()
    func = FunctionType(fcode, fglobals, fname, fdefaults, fclosure)
    if fdict is not None:
        func.__dict__.update(fdict) #XXX: better copy? option to copy?
    if fkwdefaults is not None:
        func.__kwdefaults__ = fkwdefaults
    # 'recurse' stores only referenced names in fglobals, so __builtins__
    # may be missing; supply it from this module's globals.
    func.__globals__.setdefault("__builtins__", globals()["__builtins__"])
    # assert id(fglobals) == id(func.__globals__)
    return func

585 

class match:
    """
    Make available a limited structural pattern matching-like syntax for Python < 3.10

    Patterns can be only tuples (without types) currently.
    Inspired by the package pattern-matching-PEP634.

    Usage:
    >>> with match(args) as m:
    >>>     if m.case(('x', 'y')):
    >>>         # use m.x and m.y
    >>>     elif m.case(('x', 'y', 'z')):
    >>>         # use m.x, m.y and m.z

    Equivalent native code for Python >= 3.10:
    >>> match args:
    >>>     case (x, y):
    >>>         # use x and y
    >>>     case (x, y, z):
    >>>         # use x, y and z
    """
    def __init__(self, value):
        self.value = value
        self._fields = None
    def __enter__(self):
        return self
    def __exit__(self, *exc_info):
        return False
    def case(self, args): # *args, **kwargs):
        """just handles tuple patterns"""
        # a pattern matches iff it names exactly as many slots as the value has
        if len(args) != len(self.value): # + len(kwargs):
            return False
        self.args = args # (*args, *kwargs)
        return True
    @property
    def fields(self):
        # bind pattern names to values lazily, on first access
        if self._fields is None:
            self._fields = dict(zip(self.args, self.value))
        return self._fields
    def __getattr__(self, item):
        # unknown attributes resolve to matched pattern fields
        return self.fields[item]

630 

# Known CodeType constructor layouts, newest first. Each entry pairs a
# marker attribute (introduced in that CPython version) with the ordered
# CodeType parameter names for that version.
ALL_CODE_PARAMS = [
    # Version New attribute CodeType parameters
    ((3,11,'a'), 'co_endlinetable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable endlinetable columntable exceptiontable freevars cellvars'),
    ((3,11), 'co_exceptiontable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name qualname firstlineno linetable exceptiontable freevars cellvars'),
    ((3,10), 'co_linetable', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno linetable freevars cellvars'),
    ((3,8), 'co_posonlyargcount', 'argcount posonlyargcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno lnotab freevars cellvars'),
    ((3,7), 'co_kwonlyargcount', 'argcount kwonlyargcount nlocals stacksize flags code consts names varnames filename name firstlineno lnotab freevars cellvars'),
    ]
# Probe CodeType for the newest marker attribute present to learn which
# layout the running interpreter uses.
for version, new_attr, params in ALL_CODE_PARAMS:
    if hasattr(CodeType, new_attr):
        CODE_VERSION = version
        CODE_PARAMS = params.split()
        break
# Fields that may arrive as str and must be encoded back to bytes when
# passed to CodeType (see _create_code).
ENCODE_PARAMS = set(CODE_PARAMS).intersection(
    ['code', 'lnotab', 'linetable', 'endlinetable', 'columntable', 'exceptiontable'])

646 

def _create_code(*args):
    """Reconstruct a CodeType from pickled fields across CPython layouts.

    The leading element may be a stored co_lnotab (non-int) when present;
    otherwise args starts directly with argcount (an int). The tuple is
    pattern-matched against the known CodeType signatures (3.7 - 3.12a):
    when it matches the running interpreter's layout it is passed straight
    to CodeType, otherwise the named fields are translated to the current
    layout via CODE_PARAMS, with sensible defaults for missing fields.
    """
    if not isinstance(args[0], int): # co_lnotab stored from >= 3.10
        LNOTAB, *args = args
    else: # from < 3.10 (or pre-LNOTAB storage)
        LNOTAB = b''

    with match(args) as m:
        # Python 3.11/3.12a (18 members)
        if m.case((
            'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname', 'firstlineno', # args[6:14]
            'linetable', 'exceptiontable', 'freevars', 'cellvars' # args[14:]
        )):
            if CODE_VERSION == (3,11):
                return CodeType(
                    *args[:6],
                    args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
                    *args[7:14],
                    args[14].encode() if hasattr(args[14], 'encode') else args[14], # linetable
                    args[15].encode() if hasattr(args[15], 'encode') else args[15], # exceptiontable
                    args[16],
                    args[17],
                )
            fields = m.fields
        # Python 3.10 or 3.8/3.9 (16 members)
        elif m.case((
            'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'firstlineno', # args[6:13]
            'LNOTAB_OR_LINETABLE', 'freevars', 'cellvars' # args[13:]
        )):
            if CODE_VERSION == (3,10) or CODE_VERSION == (3,8):
                return CodeType(
                    *args[:6],
                    args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
                    *args[7:13],
                    args[13].encode() if hasattr(args[13], 'encode') else args[13], # lnotab/linetable
                    args[14],
                    args[15],
                )
            fields = m.fields
            # slot 13 means lnotab on <= 3.9 but linetable on >= 3.10
            if CODE_VERSION >= (3,10):
                fields['linetable'] = m.LNOTAB_OR_LINETABLE
            else:
                fields['lnotab'] = LNOTAB if LNOTAB else m.LNOTAB_OR_LINETABLE
        # Python 3.7 (15 args)
        elif m.case((
            'argcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:5]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'firstlineno', # args[5:12]
            'lnotab', 'freevars', 'cellvars' # args[12:]
        )):
            if CODE_VERSION == (3,7):
                return CodeType(
                    *args[:5],
                    args[5].encode() if hasattr(args[5], 'encode') else args[5], # code
                    *args[6:12],
                    args[12].encode() if hasattr(args[12], 'encode') else args[12], # lnotab
                    args[13],
                    args[14],
                )
            fields = m.fields
        # Python 3.11a (20 members)
        elif m.case((
            'argcount', 'posonlyargcount', 'kwonlyargcount', 'nlocals', 'stacksize', 'flags', # args[0:6]
            'code', 'consts', 'names', 'varnames', 'filename', 'name', 'qualname', 'firstlineno', # args[6:14]
            'linetable', 'endlinetable', 'columntable', 'exceptiontable', 'freevars', 'cellvars' # args[14:]
        )):
            if CODE_VERSION == (3,11,'a'):
                return CodeType(
                    *args[:6],
                    args[6].encode() if hasattr(args[6], 'encode') else args[6], # code
                    *args[7:14],
                    *(a.encode() if hasattr(a, 'encode') else a for a in args[14:18]), # linetable-exceptiontable
                    args[18],
                    args[19],
                )
            fields = m.fields
        else:
            raise UnpicklingError("pattern match for code object failed")

    # The args format doesn't match this version: translate field-by-field,
    # defaulting any fields the source version did not have.
    fields.setdefault('posonlyargcount', 0) # from python <= 3.7
    fields.setdefault('lnotab', LNOTAB) # from python >= 3.10
    fields.setdefault('linetable', b'') # from python <= 3.9
    fields.setdefault('qualname', fields['name']) # from python <= 3.10
    fields.setdefault('exceptiontable', b'') # from python <= 3.10
    fields.setdefault('endlinetable', None) # from python != 3.11a
    fields.setdefault('columntable', None) # from python != 3.11a

    args = (fields[k].encode() if k in ENCODE_PARAMS and hasattr(fields[k], 'encode') else fields[k]
            for k in CODE_PARAMS)
    return CodeType(*args)

738 

def _create_ftype(ftypeobj, func, args, kwds):
    """Rebuild a functools-style wrapper (e.g. partial) from its factory,
    wrapped callable, positional args and keyword args (either of which
    may be pickled as None)."""
    return ftypeobj(func, *(args or ()), **(kwds or {}))

745 

def _create_typing_tuple(argz, *args): #NOTE: workaround python/cpython#94245
    """Reconstruct a typing.Tuple subscription from its stored args.

    () rebuilds an unparameterized Tuple copy, ((),) rebuilds the empty
    tuple form Tuple[()], and anything else subscripts Tuple directly.
    """
    if not argz:
        return typing.Tuple[()].copy_with(())
    elif argz == ((),):
        return typing.Tuple[()]
    else:
        return typing.Tuple[argz]

752 

def _create_lock(locked, *args): #XXX: ignores 'blocking'
    """Rebuild a threading.Lock, re-acquiring it if it was pickled in the
    locked state."""
    from threading import Lock
    lock = Lock()
    # restore the locked state without blocking
    if locked and not lock.acquire(False):
        raise UnpicklingError("Cannot acquire lock")
    return lock

760 

def _create_rlock(count, owner, *args): #XXX: ignores 'blocking'
    """Rebuild an RLock, restoring its recursion count and owning thread
    when an owner was recorded."""
    rlock = RLockType()
    if owner is not None:
        rlock._acquire_restore((count, owner))
    # verify the restore actually took ownership
    if owner and not rlock._is_owned():
        raise UnpicklingError("Cannot acquire lock")
    return rlock

768 

769# thanks to matsjoyce for adding all the different file modes 

def _create_filehandle(name, mode, position, closed, open, strictio, fmode, fdata): # buffering=0
    """Recreate a file handle from its pickled description.

    name/mode/position/closed describe the original handle; open is the
    opener to use; strictio turns mismatches (missing file, bad position)
    into errors; fmode is HANDLE_FMODE, CONTENTS_FMODE or FILE_FMODE; and
    fdata holds the saved file contents (FILE_FMODE only).
    """
    # only pickles the handle, not the file contents... good? or StringIO(data)?
    # (for file contents see: http://effbot.org/librarybook/copy-reg.htm)
    # NOTE: handle special cases first (are there more special cases?)
    names = {'<stdin>':sys.__stdin__, '<stdout>':sys.__stdout__,
             '<stderr>':sys.__stderr__} #XXX: better fileno=(0,1,2) ?
    if name in list(names.keys()):
        f = names[name] #XXX: safer "f=sys.stdin"
    elif name == '<tmpfile>':
        f = os.tmpfile()
    elif name == '<fdopen>':
        import tempfile
        f = tempfile.TemporaryFile(mode)
    else:
        # a regular named file: check what exists on this machine
        try:
            exists = os.path.exists(name)
        except Exception:
            exists = False
        if not exists:
            if strictio:
                raise FileNotFoundError("[Errno 2] No such file or directory: '%s'" % name)
            elif "r" in mode and fmode != FILE_FMODE:
                name = '<fdopen>' # or os.devnull?
            current_size = 0 # or maintain position?
        else:
            current_size = os.path.getsize(name)

        # the saved cursor may lie beyond the file we have locally
        if position > current_size:
            if strictio:
                raise ValueError("invalid buffer size")
            elif fmode == CONTENTS_FMODE:
                position = current_size
        # try to open the file by name
        # NOTE: has different fileno
        try:
            #FIXME: missing: *buffering*, encoding, softspace
            if fmode == FILE_FMODE:
                # restore the pickled contents first, then reopen in the
                # requested mode if it wasn't a write mode
                f = open(name, mode if "w" in mode else "w")
                f.write(fdata)
                if "w" not in mode:
                    f.close()
                    f = open(name, mode)
            elif name == '<fdopen>': # file did not exist
                import tempfile
                f = tempfile.TemporaryFile(mode)
            # treat x mode as w mode
            elif fmode == CONTENTS_FMODE \
               and ("w" in mode or "x" in mode):
                # stop truncation when opening
                flags = os.O_CREAT
                if "+" in mode:
                    flags |= os.O_RDWR
                else:
                    flags |= os.O_WRONLY
                f = os.fdopen(os.open(name, flags), mode)
                # set name to the correct value
                r = getattr(f, "buffer", f)
                r = getattr(r, "raw", r)
                r.name = name
                assert f.name == name
            else:
                f = open(name, mode)
        except (IOError, FileNotFoundError):
            err = sys.exc_info()[1]
            raise UnpicklingError(err)
    # finally restore the closed/cursor state of the original handle
    if closed:
        f.close()
    elif position >= 0 and fmode != HANDLE_FMODE:
        f.seek(position)
    return f

840 

841def _create_stringi(value, position, closed): 

842 f = StringIO(value) 

843 if closed: f.close() 

844 else: f.seek(position) 

845 return f 

846 

847def _create_stringo(value, position, closed): 

848 f = StringIO() 

849 if closed: f.close() 

850 else: 

851 f.write(value) 

852 f.seek(position) 

853 return f 

854 

855class _itemgetter_helper(object): 

856 def __init__(self): 

857 self.items = [] 

858 def __getitem__(self, item): 

859 self.items.append(item) 

860 return 

861 

862class _attrgetter_helper(object): 

863 def __init__(self, attrs, index=None): 

864 self.attrs = attrs 

865 self.index = index 

866 def __getattribute__(self, attr): 

867 attrs = object.__getattribute__(self, "attrs") 

868 index = object.__getattribute__(self, "index") 

869 if index is None: 

870 index = len(attrs) 

871 attrs.append(attr) 

872 else: 

873 attrs[index] = ".".join([attrs[index], attr]) 

874 return type(self)(attrs, index) 

875 

class _dictproxy_helper(dict):
    # In `mappingproxy | helper`, CPython 3.9+'s MappingProxyType.__or__
    # delegates to helper.__ror__ with the proxy's *underlying dict* as the
    # left operand -- returning it unchanged hands us that true mapping.
    def __ror__(self, a):
        return a

879 

# shared helper instance used by the `|` trick probe below and by the
# dict-view / dictproxy picklers further down
_dictproxy_helper_instance = _dictproxy_helper()

__d = {}
try:
    # In CPython 3.9 and later, this trick can be used to exploit the
    # implementation of the __or__ function of MappingProxyType to get the true
    # mapping referenced by the proxy. It may work for other implementations,
    # but is not guaranteed.
    MAPPING_PROXY_TRICK = __d is (DictProxyType(__d) | _dictproxy_helper_instance)
except Exception:
    # `|` on mappingproxy is unavailable (pre-3.9) or behaves differently
    MAPPING_PROXY_TRICK = False
del __d

# _CELL_REF and _CELL_EMPTY are used to stay compatible with versions of dill
# whose _create_cell functions do not have a default value.
# _CELL_REF can be safely removed entirely (replaced by empty tuples for calls
# to _create_cell) once breaking changes are allowed.
_CELL_REF = None
_CELL_EMPTY = Sentinel('_CELL_EMPTY')

899 

def _create_cell(contents=None):
    """Recreate a closure cell holding *contents*.

    If *contents* is the _CELL_EMPTY sentinel, ``value`` is deliberately
    left unassigned, so the lambda's closure cell is created *empty*
    (reading it raises ValueError), matching a cleared cell.
    """
    if contents is not _CELL_EMPTY:
        value = contents
    return (lambda: value).__closure__[0]

904 

905def _create_weakref(obj, *args): 

906 from weakref import ref 

907 if obj is None: # it's dead 

908 from collections import UserDict 

909 return ref(UserDict(), *args) 

910 return ref(obj, *args) 

911 

912def _create_weakproxy(obj, callable=False, *args): 

913 from weakref import proxy 

914 if obj is None: # it's dead 

915 if callable: return proxy(lambda x:x, *args) 

916 from collections import UserDict 

917 return proxy(UserDict(), *args) 

918 return proxy(obj, *args) 

919 

def _eval_repr(repr_str):
    """Reconstruct an object by evaluating its repr (used by save_singleton
    for objects whose repr is eval-able, e.g. range/Ellipsis/NotImplemented).

    SECURITY NOTE: this eval()s data from the pickle stream -- like pickle
    itself, it must only be used on trusted input.
    """
    return eval(repr_str)

922 

923def _create_array(f, args, state, npdict=None): 

924 #array = numpy.core.multiarray._reconstruct(*args) 

925 array = f(*args) 

926 array.__setstate__(state) 

927 if npdict is not None: # we also have saved state in __dict__ 

928 array.__dict__.update(npdict) 

929 return array 

930 

def _create_dtypemeta(scalar_type):
    """Return numpy's dtype (meta)class, or the dtype subclass for
    *scalar_type* when one is given."""
    if NumpyDType is True:
        __hook__() # load numpy lazily (a bit hacky)
    return NumpyDType if scalar_type is None else type(NumpyDType(scalar_type))

936 

def _create_namedtuple(name, fieldnames, modulename, defaults=None):
    """Return the namedtuple class, importing the original when possible,
    otherwise rebuilding an equivalent class from its fields/defaults."""
    existing = _import_module(modulename + '.' + name, safe=True)
    if existing is not None:
        return existing
    # original class not importable: reconstruct it
    import collections
    return collections.namedtuple(name, fieldnames, defaults=defaults,
                                  module=modulename)

944 

def _create_capsule(pointer, name, context, destructor):
    """Recreate a PyCapsule.

    First tries to locate an already-existing capsule by importing the
    module embedded in *name* (trying each possible module/attribute split
    of the dotted name); only if that fails is a new capsule constructed
    around the pickled *pointer* -- which may be stale, hence the warning.
    """
    attr_found = False
    try:
        # based on https://github.com/python/cpython/blob/f4095e53ab708d95e019c909d5928502775ba68f/Objects/capsule.c#L209-L231
        uname = name.decode('utf8')
        for i in range(1, uname.count('.')+1):
            names = uname.rsplit('.', i)
            try:
                module = __import__(names[0])
            except ImportError:
                # BUGFIX: skip to the next split instead of falling through
                # with 'module' unbound (NameError aborted the whole loop)
                # or stale from a previous iteration
                continue
            obj = module
            for attr in names[1:]:
                obj = getattr(obj, attr)
            capsule = obj
            attr_found = True
            break
    except Exception:
        pass

    if attr_found:
        if _PyCapsule_IsValid(capsule, name):
            return capsule
        raise UnpicklingError("%s object exists at %s but a PyCapsule object was expected." % (type(capsule), name))
    else:
        warnings.warn('Creating a new PyCapsule %s for a C data structure that may not be present in memory. Segmentation faults or other memory errors are possible.' % (name,), UnpicklingWarning)
        capsule = _PyCapsule_New(pointer, name, destructor)
        _PyCapsule_SetContext(capsule, context)
        return capsule

974 

def _getattr(objclass, name, repr_str):
    """Grab the descriptor *name* from *objclass* (used to restore
    method/wrapper descriptors).

    First attempts an eval hack: pulls the owning class name out of
    *repr_str* -- presumably of the form "<... '<name>' of '<class>'
    objects>", so split("'")[3] is the class -- and evals its __dict__
    entry directly (NOTE(review): works only for builtins reachable by
    bare name; verify).  Falls back to a __dict__/getattr lookup.
    """
    # hack to grab the reference directly
    try: #XXX: works only for __builtin__ ?
        attr = repr_str.split("'")[3]
        return eval(attr+'.__dict__["'+name+'"]')
    except Exception:
        try:
            attr = objclass.__dict__
            if type(attr) is DictProxyType:
                attr = attr[name]
            else:
                attr = getattr(objclass,name)
        except (AttributeError, KeyError):
            attr = getattr(objclass,name)
    return attr

990 

991def _get_attr(self, name): 

992 # stop recursive pickling 

993 return getattr(self, name, None) or getattr(__builtin__, name) 

994 

995def _import_module(import_name, safe=False): 

996 try: 

997 if import_name.startswith('__runtime__.'): 

998 return sys.modules[import_name] 

999 elif '.' in import_name: 

1000 items = import_name.split('.') 

1001 module = '.'.join(items[:-1]) 

1002 obj = items[-1] 

1003 else: 

1004 return __import__(import_name) 

1005 return getattr(__import__(module, None, None, [obj]), obj) 

1006 except (ImportError, AttributeError, KeyError): 

1007 if safe: 

1008 return None 

1009 raise 

1010 

1011# https://github.com/python/cpython/blob/a8912a0f8d9eba6d502c37d522221f9933e976db/Lib/pickle.py#L322-L333 

1012def _getattribute(obj, name): 

1013 for subpath in name.split('.'): 

1014 if subpath == '<locals>': 

1015 raise AttributeError("Can't get local attribute {!r} on {!r}" 

1016 .format(name, obj)) 

1017 try: 

1018 parent = obj 

1019 obj = getattr(obj, subpath) 

1020 except AttributeError: 

1021 raise AttributeError("Can't get attribute {!r} on {!r}" 

1022 .format(name, obj)) 

1023 return obj, parent 

1024 

def _locate_function(obj, pickler=None):
    """Return True if *obj* is reachable by import at its qualified name
    (i.e. may be pickled by reference), False if it must be pickled by value.

    __main__ objects, and objects from the session's main module during a
    session dump, are never located (always pickled by value).
    """
    module_name = getattr(obj, '__module__', None)
    if module_name in ['__main__', None] or \
            pickler and is_dill(pickler, child=False) and pickler._session and module_name == pickler._main.__name__:
        return False
    if hasattr(obj, '__qualname__'):
        # follow the qualified path inside the module and require identity
        module = _import_module(module_name, safe=True)
        try:
            found, _ = _getattribute(module, obj.__qualname__)
            return found is obj
        except AttributeError:
            return False
    else:
        found = _import_module(module_name + '.' + obj.__name__, safe=True)
        return found is obj

1040 

1041 

1042def _setitems(dest, source): 

1043 for k, v in source.items(): 

1044 dest[k] = v 

1045 

1046 

def _save_with_postproc(pickler, reduction, is_pickler_dill=None, obj=Getattr.NO_DEFAULT, postproc_list=None):
    """Save *reduction* for *obj*, then emit any postprocessing reductions
    that were queued for it (used to break reference cycles: the object is
    created first, then patched up).

    While *obj* is being saved, pickler._postproc[id(obj)] holds the list
    of pending (callable, args) reductions; helpers (e.g. save_cell) append
    to it.  After the main reduce, the queue is drained in reverse and each
    queued reduction is written followed by a POP ('0') opcode, since its
    return value is not wanted on the pickle stack.
    """
    if obj is Getattr.NO_DEFAULT:
        obj = Reduce(reduction) # pragma: no cover

    if is_pickler_dill is None:
        is_pickler_dill = is_dill(pickler, child=True)
    if is_pickler_dill:
        # assert id(obj) not in pickler._postproc, str(obj) + ' already pushed on stack!'
        # if not hasattr(pickler, 'x'): pickler.x = 0
        # print(pickler.x*' ', 'push', obj, id(obj), pickler._recurse)
        # pickler.x += 1
        if postproc_list is None:
            postproc_list = []

        # Recursive object not supported. Default to a global instead.
        if id(obj) in pickler._postproc:
            name = '%s.%s ' % (obj.__module__, getattr(obj, '__qualname__', obj.__name__)) if hasattr(obj, '__module__') else ''
            warnings.warn('Cannot pickle %r: %shas recursive self-references that trigger a RecursionError.' % (obj, name), PicklingWarning)
            pickler.save_global(obj)
            return
        pickler._postproc[id(obj)] = postproc_list

    # TODO: Use state_setter in Python 3.8 to allow for faster cPickle implementations
    pickler.save_reduce(*reduction, obj=obj)

    if is_pickler_dill:
        # pickler.x -= 1
        # print(pickler.x*' ', 'pop', obj, id(obj))
        postproc = pickler._postproc.pop(id(obj))
        # assert postproc_list == postproc, 'Stack tampered!'
        for reduction in reversed(postproc):
            if reduction[0] is _setitems:
                # use the internal machinery of pickle.py to speedup when
                # updating a dictionary in postproc
                dest, source = reduction[1]
                if source:
                    # fetch the memoized dict back onto the stack, then
                    # write SETITEMS batches for the pending entries
                    pickler.write(pickler.get(pickler.memo[id(dest)][0]))
                    pickler._batch_setitems(iter(source.items()))
                else:
                    # Updating with an empty dictionary. Same as doing nothing.
                    continue
            else:
                pickler.save_reduce(*reduction)
            # pop None created by calling preprocessing step off stack
            pickler.write(bytes('0', 'UTF-8'))

1092 

1093#@register(CodeType) 

1094#def save_code(pickler, obj): 

1095# logger.trace(pickler, "Co: %s", obj) 

1096# pickler.save_reduce(_unmarshal, (marshal.dumps(obj),), obj=obj) 

1097# logger.trace(pickler, "# Co") 

1098# return 

1099 

1100# The following function is based on 'save_codeobject' from 'cloudpickle' 

1101# Copyright (c) 2012, Regents of the University of California. 

1102# Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_. 

1103# License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE 

@register(CodeType)
def save_code(pickler, obj):
    """Pickle a code object by capturing the arguments needed to rebuild
    it with _create_code.

    The hasattr probes distinguish interpreter versions (newest first),
    since CodeType's constructor signature changed in 3.8, 3.10 and 3.11.
    For 3.10+ layouts, co_lnotab is prepended as an extra leading element
    (not counted in the arg totals) so _create_code can still rebuild on
    older interpreters.
    """
    logger.trace(pickler, "Co: %s", obj)
    if hasattr(obj, "co_endlinetable"): # python 3.11a (20 args)
        args = (
            obj.co_lnotab, # for < python 3.10 [not counted in args]
            obj.co_argcount, obj.co_posonlyargcount,
            obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
            obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
            obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname,
            obj.co_firstlineno, obj.co_linetable, obj.co_endlinetable,
            obj.co_columntable, obj.co_exceptiontable, obj.co_freevars,
            obj.co_cellvars
        )
    elif hasattr(obj, "co_exceptiontable"): # python 3.11 (18 args)
        args = (
            obj.co_lnotab, # for < python 3.10 [not counted in args]
            obj.co_argcount, obj.co_posonlyargcount,
            obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
            obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
            obj.co_varnames, obj.co_filename, obj.co_name, obj.co_qualname,
            obj.co_firstlineno, obj.co_linetable, obj.co_exceptiontable,
            obj.co_freevars, obj.co_cellvars
        )
    elif hasattr(obj, "co_linetable"): # python 3.10 (16 args)
        args = (
            obj.co_lnotab, # for < python 3.10 [not counted in args]
            obj.co_argcount, obj.co_posonlyargcount,
            obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
            obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
            obj.co_varnames, obj.co_filename, obj.co_name,
            obj.co_firstlineno, obj.co_linetable, obj.co_freevars,
            obj.co_cellvars
        )
    elif hasattr(obj, "co_posonlyargcount"): # python 3.8 (16 args)
        args = (
            obj.co_argcount, obj.co_posonlyargcount,
            obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
            obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
            obj.co_varnames, obj.co_filename, obj.co_name,
            obj.co_firstlineno, obj.co_lnotab, obj.co_freevars,
            obj.co_cellvars
        )
    else: # python 3.7 (15 args)
        args = (
            obj.co_argcount, obj.co_kwonlyargcount, obj.co_nlocals,
            obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts,
            obj.co_names, obj.co_varnames, obj.co_filename,
            obj.co_name, obj.co_firstlineno, obj.co_lnotab,
            obj.co_freevars, obj.co_cellvars
        )

    pickler.save_reduce(_create_code, args, obj=obj)
    logger.trace(pickler, "# Co")
    return

1159 

1160def _repr_dict(obj): 

1161 """make a short string representation of a dictionary""" 

1162 return "<%s object at %#012x>" % (type(obj).__name__, id(obj)) 

1163 

@register(dict)
def save_module_dict(pickler, obj):
    """Pickle a dict, special-casing module __dict__s so they unpickle as
    references to the live module's namespace instead of as copies."""
    if is_dill(pickler, child=False) and obj == pickler._main.__dict__ and \
            not (pickler._session and pickler._first_pass):
        # D1: the pickler's main-module dict -> raw GLOBAL opcode for __main__
        logger.trace(pickler, "D1: %s", _repr_dict(obj)) # obj
        pickler.write(bytes('c__builtin__\n__main__\n', 'UTF-8'))
        logger.trace(pickler, "# D1")
    elif (not is_dill(pickler, child=False)) and (obj == _main_module.__dict__):
        # D3: a stock pickler saving __main__.__dict__ -> reference it
        logger.trace(pickler, "D3: %s", _repr_dict(obj)) # obj
        pickler.write(bytes('c__main__\n__dict__\n', 'UTF-8')) #XXX: works in general?
        logger.trace(pickler, "# D3")
    elif '__name__' in obj and obj != _main_module.__dict__ \
            and type(obj['__name__']) is str \
            and obj is getattr(_import_module(obj['__name__'],True), '__dict__', None):
        # D4: identified as some importable module's __dict__ -> reference it
        logger.trace(pickler, "D4: %s", _repr_dict(obj)) # obj
        pickler.write(bytes('c%s\n__dict__\n' % obj['__name__'], 'UTF-8'))
        logger.trace(pickler, "# D4")
    else:
        # D2: an ordinary dict -> pickle contents normally
        logger.trace(pickler, "D2: %s", _repr_dict(obj)) # obj
        if is_dill(pickler, child=False) and pickler._session:
            # we only care about session the first pass thru
            pickler._first_pass = False
        StockPickler.save_dict(pickler, obj)
        logger.trace(pickler, "# D2")
    return

1189 

1190 

if not OLD310 and MAPPING_PROXY_TRICK:
    # Python 3.10+ dict views expose .mapping (a mappingproxy); combined
    # with the `|` trick we recover the real backing dict and pickle the
    # view simply as viewfunc(backing_dict), preserving identity.
    def save_dict_view(dicttype):
        def save_dict_view_for_function(func):
            def _save_dict_view(pickler, obj):
                logger.trace(pickler, "Dkvi: <%s>", obj)
                mapping = obj.mapping | _dictproxy_helper_instance
                pickler.save_reduce(func, (mapping,), obj=obj)
                logger.trace(pickler, "# Dkvi")
            return _save_dict_view
        return [
            (funcname, save_dict_view_for_function(getattr(dicttype, funcname)))
            for funcname in ('keys', 'values', 'items')
        ]
else:
    # The following functions are based on 'cloudpickle'
    # https://github.com/cloudpipe/cloudpickle/blob/5d89947288a18029672596a4d719093cc6d5a412/cloudpickle/cloudpickle.py#L922-L940
    # Copyright (c) 2012, Regents of the University of California.
    # Copyright (c) 2009 `PiCloud, Inc. <http://www.picloud.com>`_.
    # License: https://github.com/cloudpipe/cloudpickle/blob/master/LICENSE
    def save_dict_view(dicttype):
        # Without .mapping, rebuild a stand-in dict from the view's
        # contents and pickle the view of that stand-in.
        def save_dict_keys(pickler, obj):
            logger.trace(pickler, "Dk: <%s>", obj)
            # keys view -> fromkeys(list_of_keys).keys()
            dict_constructor = _shims.Reduce(dicttype.fromkeys, (list(obj),))
            pickler.save_reduce(dicttype.keys, (dict_constructor,), obj=obj)
            logger.trace(pickler, "# Dk")

        def save_dict_values(pickler, obj):
            logger.trace(pickler, "Dv: <%s>", obj)
            # values view -> dict(enumerate(values)).values()
            dict_constructor = _shims.Reduce(dicttype, (enumerate(obj),))
            pickler.save_reduce(dicttype.values, (dict_constructor,), obj=obj)
            logger.trace(pickler, "# Dv")

        def save_dict_items(pickler, obj):
            logger.trace(pickler, "Di: <%s>", obj)
            # items view -> dict(items).items()
            pickler.save_reduce(dicttype.items, (dicttype(obj),), obj=obj)
            logger.trace(pickler, "# Di")

        return (
            ('keys', save_dict_keys),
            ('values', save_dict_values),
            ('items', save_dict_items)
        )

1233 

# Register the view picklers for each dict type's keys/values/items view
# types, without clobbering any dispatch entry already present.
for __dicttype in (
    dict,
    OrderedDict
):
    __obj = __dicttype()
    for __funcname, __savefunc in save_dict_view(__dicttype):
        # instantiate an empty view just to obtain its concrete type
        __tview = type(getattr(__obj, __funcname)())
        if __tview not in Pickler.dispatch:
            Pickler.dispatch[__tview] = __savefunc
del __dicttype, __obj, __funcname, __tview, __savefunc

1244 

1245 

@register(ClassType)
def save_classobj(pickler, obj): #FIXME: enable pickler._byref
    """Pickle a class: by value (name, bases, dict) when it cannot be
    located by import, otherwise by global reference."""
    if not _locate_function(obj, pickler):
        logger.trace(pickler, "C1: %s", obj)
        pickler.save_reduce(ClassType, (obj.__name__, obj.__bases__,
                                        obj.__dict__), obj=obj)
        #XXX: or obj.__dict__.copy()), obj=obj) ?
        logger.trace(pickler, "# C1")
    else:
        logger.trace(pickler, "C2: %s", obj)
        name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
        StockPickler.save_global(pickler, obj, name=name)
        logger.trace(pickler, "# C2")
    return

1260 

@register(typing._GenericAlias)
def save_generic_alias(pickler, obj):
    """Pickle typing generic aliases (e.g. List[int]).

    A string __reduce__ means a plain global reference; Tuple[()] and bare
    Tuple need the _create_typing_tuple helper; everything else defers to
    the alias's own __reduce__.
    """
    args = obj.__args__
    if type(obj.__reduce__()) is str:
        logger.trace(pickler, "Ga0: %s", obj)
        StockPickler.save_global(pickler, obj, name=obj.__reduce__())
        logger.trace(pickler, "# Ga0")
    elif obj.__origin__ is tuple and (not args or args == ((),)):
        logger.trace(pickler, "Ga1: %s", obj)
        pickler.save_reduce(_create_typing_tuple, (args,), obj=obj)
        logger.trace(pickler, "# Ga1")
    else:
        logger.trace(pickler, "Ga2: %s", obj)
        StockPickler.save_reduce(pickler, *obj.__reduce__(), obj=obj)
        logger.trace(pickler, "# Ga2")
    return

1277 

@register(LockType)
def save_lock(pickler, obj):
    """Pickle a thread lock by recording only whether it is held."""
    logger.trace(pickler, "Lo: %s", obj)
    locked = obj.locked()
    pickler.save_reduce(_create_lock, (locked,), obj=obj)
    logger.trace(pickler, "# Lo")

1284 

@register(RLockType)
def save_rlock(pickler, obj):
    """Pickle an RLock by parsing its recursion count and owner thread id
    out of its repr, which avoids mutating the lock's state."""
    logger.trace(pickler, "RL: %s", obj)
    r = obj.__repr__() # don't use _release_save as it unlocks the lock
    # NOTE(review): parsing assumes CPython's repr format, e.g.
    # "<locked _thread.RLock object owner=... count=... at 0x...>" --
    # fragile if the format changes; verify on new interpreter versions
    count = int(r.split('count=')[1].split()[0].rstrip('>'))
    owner = int(r.split('owner=')[1].split()[0])
    pickler.save_reduce(_create_rlock, (count,owner,), obj=obj)
    logger.trace(pickler, "# RL")
    return

1294 

#@register(SocketType) #FIXME: causes multiprocess test_pickling FAIL
def save_socket(pickler, obj):
    """Pickle a socket via multiprocessing's reduce_socket handler
    (registration disabled, see FIXME above)."""
    logger.trace(pickler, "So: %s", obj)
    reduction = reduce_socket(obj)
    pickler.save_reduce(*reduction)
    logger.trace(pickler, "# So")

1301 

def _save_file(pickler, obj, open_):
    """Common reducer for file handles: captures name/mode/position/closed
    (plus full contents under FILE_FMODE) for _create_filehandle."""
    if obj.closed:
        position = 0
    else:
        obj.flush() # keep tell() consistent with what is on disk
        if obj in (sys.__stdout__, sys.__stderr__, sys.__stdin__):
            position = -1 # sentinel: standard streams, don't seek on restore
        else:
            position = obj.tell()
    if is_dill(pickler, child=True) and pickler._fmode == FILE_FMODE:
        # FILE_FMODE: ship the file's entire contents in the pickle
        f = open_(obj.name, "r")
        fdata = f.read()
        f.close()
    else:
        fdata = ""
    if is_dill(pickler, child=True):
        strictio = pickler._strictio
        fmode = pickler._fmode
    else:
        strictio = False
        fmode = 0 # HANDLE_FMODE
    pickler.save_reduce(_create_filehandle, (obj.name, obj.mode, position,
                                             obj.closed, open_, strictio,
                                             fmode, fdata), obj=obj)
    return

1327 

1328 

@register(FileType) #XXX: in 3.x has buffer=0, needs different _create?
@register(BufferedRandomType)
@register(BufferedReaderType)
@register(BufferedWriterType)
@register(TextWrapperType)
def save_file(pickler, obj):
    """Pickle a C-implemented io file object, reopened with builtin open."""
    logger.trace(pickler, "Fi: %s", obj)
    result = _save_file(pickler, obj, open)
    logger.trace(pickler, "# Fi")
    return result

1339 

if PyTextWrapperType:
    # pure-python io types (from _pyio) get the same treatment, but are
    # reopened with _open (the pure-python open) on unpickling
    @register(PyBufferedRandomType)
    @register(PyBufferedReaderType)
    @register(PyBufferedWriterType)
    @register(PyTextWrapperType)
    def save_file(pickler, obj):
        logger.trace(pickler, "Fi: %s", obj)
        f = _save_file(pickler, obj, _open)
        logger.trace(pickler, "# Fi")
        return f

1350 

1351# The following two functions are based on 'saveCStringIoInput' 

1352# and 'saveCStringIoOutput' from spickle 

1353# Copyright (c) 2011 by science+computing ag 

1354# License: http://www.apache.org/licenses/LICENSE-2.0 

if InputType:
    @register(InputType)
    def save_stringi(pickler, obj):
        """Pickle a StringIO input buffer via (value, position, closed)."""
        logger.trace(pickler, "Io: %s", obj)
        if obj.closed:
            value = ''; position = 0
        else:
            value = obj.getvalue(); position = obj.tell()
        pickler.save_reduce(_create_stringi, (value, position, \
                                              obj.closed), obj=obj)
        logger.trace(pickler, "# Io")
        return

    @register(OutputType)
    def save_stringo(pickler, obj):
        """Pickle a StringIO output buffer via (value, position, closed)."""
        logger.trace(pickler, "Io: %s", obj)
        if obj.closed:
            value = ''; position = 0
        else:
            value = obj.getvalue(); position = obj.tell()
        pickler.save_reduce(_create_stringo, (value, position, \
                                              obj.closed), obj=obj)
        logger.trace(pickler, "# Io")
        return

1379 

if LRUCacheType is not None:
    from functools import lru_cache
    @register(LRUCacheType)
    def save_lru_cache(pickler, obj):
        """Pickle an lru_cache wrapper by re-wrapping its __wrapped__
        function with the same maxsize/typed parameters.  NOTE: cached
        entries are not preserved."""
        logger.trace(pickler, "LRU: %s", obj)
        if OLD39:
            # cache_parameters() is new in 3.9; fall back to cache_info()
            kwargs = obj.cache_info()
            args = (kwargs.maxsize,)
        else:
            kwargs = obj.cache_parameters()
            args = (kwargs['maxsize'], kwargs['typed'])
        if args != lru_cache.__defaults__:
            wrapper = Reduce(lru_cache, args, is_callable=True)
        else:
            # default parameters: use bare lru_cache as the rebuilding callable
            wrapper = lru_cache
        pickler.save_reduce(wrapper, (obj.__wrapped__,), obj=obj)
        logger.trace(pickler, "# LRU")
        return

1398 

@register(SuperType)
def save_super(pickler, obj):
    """Pickle a super() object from its (__thisclass__, __self__) pair."""
    logger.trace(pickler, "Su: %s", obj)
    pair = (obj.__thisclass__, obj.__self__)
    pickler.save_reduce(super, pair, obj=obj)
    logger.trace(pickler, "# Su")

1405 

if IS_PYPY:
    @register(MethodType)
    def save_instancemethod0(pickler, obj):
        """Pickle a bound method.  On PyPy, builtin bound methods (whose
        __func__ lacks a real code object) are saved as getattr(self, name)
        since their __func__ itself is not picklable."""
        code = getattr(obj.__func__, '__code__', None)
        if code is not None and type(code) is not CodeType \
                and getattr(obj.__self__, obj.__name__) == obj:
            # Some PyPy builtin functions have no module name
            logger.trace(pickler, "Me2: %s", obj)
            # TODO: verify that this works for all PyPy builtin methods
            pickler.save_reduce(getattr, (obj.__self__, obj.__name__), obj=obj)
            logger.trace(pickler, "# Me2")
            return

        logger.trace(pickler, "Me1: %s", obj)
        pickler.save_reduce(MethodType, (obj.__func__, obj.__self__), obj=obj)
        logger.trace(pickler, "# Me1")
        return
else:
    @register(MethodType)
    def save_instancemethod0(pickler, obj):
        """Pickle a bound method from its (function, instance) pair."""
        logger.trace(pickler, "Me1: %s", obj)
        pickler.save_reduce(MethodType, (obj.__func__, obj.__self__), obj=obj)
        logger.trace(pickler, "# Me1")
        return

1430 

if not IS_PYPY:
    @register(MemberDescriptorType)
    @register(GetSetDescriptorType)
    @register(MethodDescriptorType)
    @register(WrapperDescriptorType)
    @register(ClassMethodDescriptorType)
    def save_wrapper_descriptor(pickler, obj):
        """Pickle builtin descriptors by (owner class, name, repr), which
        _getattr uses to re-fetch the live descriptor on unpickling."""
        logger.trace(pickler, "Wr: %s", obj)
        pickler.save_reduce(_getattr, (obj.__objclass__, obj.__name__,
                                       obj.__repr__()), obj=obj)
        logger.trace(pickler, "# Wr")
        return
else:
    # PyPy exposes fewer distinct descriptor types
    @register(MemberDescriptorType)
    @register(GetSetDescriptorType)
    def save_wrapper_descriptor(pickler, obj):
        logger.trace(pickler, "Wr: %s", obj)
        pickler.save_reduce(_getattr, (obj.__objclass__, obj.__name__,
                                       obj.__repr__()), obj=obj)
        logger.trace(pickler, "# Wr")
        return

1452 

@register(CellType)
def save_cell(pickler, obj):
    """Pickle a closure cell.

    Three cases: Ce3 -- the cell is empty; Ce2 -- the cell's contents are
    (or may become) part of a reference cycle, so the cell is created empty
    and filled in via the postproc queue; Ce1 -- plain cell with contents.
    """
    try:
        f = obj.cell_contents
    except ValueError: # cell is empty
        logger.trace(pickler, "Ce3: %s", obj)
        # _shims._CELL_EMPTY is defined in _shims.py to support PyPy 2.7.
        # It unpickles to a sentinel object _dill._CELL_EMPTY, also created in
        # _shims.py. This object is not present in Python 3 because the cell's
        # contents can be deleted in newer versions of Python. The reduce object
        # will instead unpickle to None if unpickled in Python 3.

        # When breaking changes are made to dill, (_shims._CELL_EMPTY,) can
        # be replaced by () OR the delattr function can be removed repending on
        # whichever is more convienient.
        pickler.save_reduce(_create_cell, (_shims._CELL_EMPTY,), obj=obj)
        # Call the function _delattr on the cell's cell_contents attribute
        # The result of this function call will be None
        pickler.save_reduce(_shims._delattr, (obj, 'cell_contents'))
        # pop None created by calling _delattr off stack
        pickler.write(bytes('0', 'UTF-8'))
        logger.trace(pickler, "# Ce3")
        return
    if is_dill(pickler, child=True):
        if id(f) in pickler._postproc:
            # Already seen. Add to its postprocessing.
            postproc = pickler._postproc[id(f)]
        else:
            # Haven't seen it. Add to the highest possible object and set its
            # value as late as possible to prevent cycle.
            postproc = next(iter(pickler._postproc.values()), None)
        if postproc is not None:
            logger.trace(pickler, "Ce2: %s", obj)
            # _CELL_REF is defined in _shims.py to support older versions of
            # dill. When breaking changes are made to dill, (_CELL_REF,) can
            # be replaced by ()
            pickler.save_reduce(_create_cell, (_CELL_REF,), obj=obj)
            postproc.append((_shims._setattr, (obj, 'cell_contents', f)))
            logger.trace(pickler, "# Ce2")
            return
    logger.trace(pickler, "Ce1: %s", obj)
    pickler.save_reduce(_create_cell, (f,), obj=obj)
    logger.trace(pickler, "# Ce1")
    return

1497 

if MAPPING_PROXY_TRICK:
    @register(DictProxyType)
    def save_dictproxy(pickler, obj):
        """Pickle a mappingproxy, recovering the true underlying dict via
        the `|` trick so object identity/recursion are preserved."""
        logger.trace(pickler, "Mp: %s", _repr_dict(obj)) # obj
        mapping = obj | _dictproxy_helper_instance
        pickler.save_reduce(DictProxyType, (mapping,), obj=obj)
        logger.trace(pickler, "# Mp")
        return
else:
    @register(DictProxyType)
    def save_dictproxy(pickler, obj):
        """Pickle a mappingproxy from a shallow copy of its contents
        (underlying dict not recoverable without the `|` trick)."""
        logger.trace(pickler, "Mp: %s", _repr_dict(obj)) # obj
        pickler.save_reduce(DictProxyType, (obj.copy(),), obj=obj)
        logger.trace(pickler, "# Mp")
        return

1513 

@register(SliceType)
def save_slice(pickler, obj):
    """Pickle a slice from its (start, stop, step) triple."""
    logger.trace(pickler, "Sl: %s", obj)
    triple = (obj.start, obj.stop, obj.step)
    pickler.save_reduce(slice, triple, obj=obj)
    logger.trace(pickler, "# Sl")

1520 

@register(XRangeType)
@register(EllipsisType)
@register(NotImplementedType)
def save_singleton(pickler, obj):
    """Pickle range/Ellipsis/NotImplemented by re-evaluating their repr."""
    logger.trace(pickler, "Si: %s", obj)
    pickler.save_reduce(_eval_repr, (repr(obj),), obj=obj)
    logger.trace(pickler, "# Si")

1529 

def _proxy_helper(obj): # a dead proxy returns a reference to None
    """get memory address of proxy's reference object"""
    _repr = repr(obj)
    try: _str = str(obj)
    except ReferenceError: # it's a dead proxy
        return id(None)
    if _str == _repr: return id(obj) # it's a repr
    try: # either way, it's a proxy from here
        # NOTE(review): assumes the referent's str looks like
        # "<... at 0xADDR>" -- fragile for objects with custom __str__
        address = int(_str.rstrip('>').split(' at ')[-1], base=16)
    except ValueError: # special case: proxy of a 'type'
        if not IS_PYPY:
            address = int(_repr.rstrip('>').split(' at ')[-1], base=16)
        else:
            # PyPy reprs lack addresses; scan the gc for a matching repr
            objects = iter(gc.get_objects())
            for _obj in objects:
                if repr(_obj) == _str: return id(_obj)
            # all bad below... nothing found so throw ReferenceError
            msg = "Cannot reference object for proxy at '%s'" % id(obj)
            raise ReferenceError(msg)
    return address

1550 

1551def _locate_object(address, module=None): 

1552 """get object located at the given memory address (inverse of id(obj))""" 

1553 special = [None, True, False] #XXX: more...? 

1554 for obj in special: 

1555 if address == id(obj): return obj 

1556 if module: 

1557 objects = iter(module.__dict__.values()) 

1558 else: objects = iter(gc.get_objects()) 

1559 for obj in objects: 

1560 if address == id(obj): return obj 

1561 # all bad below... nothing found so throw ReferenceError or TypeError 

1562 try: address = hex(address) 

1563 except TypeError: 

1564 raise TypeError("'%s' is not a valid memory address" % str(address)) 

1565 raise ReferenceError("Cannot reference object at '%s'" % address) 

1566 

@register(ReferenceType)
def save_weakref(pickler, obj):
    """Pickle a weakref by pickling its referent (None when dead)."""
    logger.trace(pickler, "R1: %s", obj)
    #refobj = ctypes.pythonapi.PyWeakref_GetObject(obj) # dead returns "None"
    referent = obj() # dereference; None if the referent died
    pickler.save_reduce(_create_weakref, (referent,), obj=obj)
    logger.trace(pickler, "# R1")

1575 

@register(ProxyType)
@register(CallableProxyType)
def save_weakproxy(pickler, obj):
    """Pickle a weak proxy by locating its referent through its memory
    address (a dead proxy yields None as the referent)."""
    # Must do string substitution here and use %r to avoid ReferenceError.
    logger.trace(pickler, "R2: %r" % obj)
    refobj = _locate_object(_proxy_helper(obj))
    pickler.save_reduce(_create_weakproxy, (refobj, callable(obj)), obj=obj)
    logger.trace(pickler, "# R2")
    return

1585 

1586def _is_builtin_module(module): 

1587 if not hasattr(module, "__file__"): return True 

1588 # If a module file name starts with prefix, it should be a builtin 

1589 # module, so should always be pickled as a reference. 

1590 names = ["base_prefix", "base_exec_prefix", "exec_prefix", "prefix", "real_prefix"] 

1591 return any(os.path.realpath(module.__file__).startswith(os.path.realpath(getattr(sys, name))) 

1592 for name in names if hasattr(sys, name)) or \ 

1593 module.__file__.endswith(EXTENSION_SUFFIXES) or \ 

1594 'site-packages' in module.__file__ 

1595 

1596def _is_imported_module(module): 

1597 return getattr(module, '__loader__', None) is not None or module in sys.modules.values() 

1598 

@register(ModuleType)
def save_module(pickler, obj):
    """Pickle a module: by state (its filtered __dict__) for user modules
    and the session's main module, by name otherwise; dill._dill itself is
    saved as a global reference."""
    if False: #_use_diff:
        # dead branch: diff-based saving is disabled (see module-level diff)
        if obj.__name__.split('.', 1)[0] != "dill":
            try:
                changed = diff.whats_changed(obj, seen=pickler._diff_cache)[0]
            except RuntimeError: # not memorised module, probably part of dill
                pass
            else:
                logger.trace(pickler, "M2: %s with diff", obj)
                logger.trace(pickler, "Diff: %s", changed.keys())
                pickler.save_reduce(_import_module, (obj.__name__,), obj=obj,
                                    state=changed)
                logger.trace(pickler, "# M2")
                return

        logger.trace(pickler, "M1: %s", obj)
        pickler.save_reduce(_import_module, (obj.__name__,), obj=obj)
        logger.trace(pickler, "# M1")
    else:
        builtin_mod = _is_builtin_module(obj)
        if obj.__name__ not in ("builtins", "dill", "dill._dill") and not builtin_mod or \
                is_dill(pickler, child=True) and obj is pickler._main:
            # M1: save by state -- import (or create) the module, then
            # restore its __dict__ minus singletons and loader machinery
            logger.trace(pickler, "M1: %s", obj)
            _main_dict = obj.__dict__.copy() #XXX: better no copy? option to copy?
            [_main_dict.pop(item, None) for item in singletontypes
                + ["__builtins__", "__loader__"]]
            mod_name = obj.__name__ if _is_imported_module(obj) else '__runtime__.%s' % obj.__name__
            pickler.save_reduce(_import_module, (mod_name,), obj=obj,
                                state=_main_dict)
            logger.trace(pickler, "# M1")
        elif obj.__name__ == "dill._dill":
            logger.trace(pickler, "M2: %s", obj)
            pickler.save_global(obj, name="_dill")
            logger.trace(pickler, "# M2")
        else:
            # M2: builtin or dill module -- save by (re)import
            logger.trace(pickler, "M2: %s", obj)
            pickler.save_reduce(_import_module, (obj.__name__,), obj=obj)
            logger.trace(pickler, "# M2")
        return
    return

1640 

@register(TypeType)
def save_type(pickler, obj, postproc_list=None):
    """Pickle a class/type object.

    Dispatch (trace tags): T1 types registered in ``_typemap``; T6 namedtuple
    subclasses; T7 the NoneType/NotImplementedType/EllipsisType singletons;
    T2 types that must be rebuilt from their ``__dict__``; T4 types that can
    be saved as a plain global reference.
    """
    if obj in _typemap:
        logger.trace(pickler, "T1: %s", obj)
        # if obj in _incedental_types:
        #     warnings.warn('Type %r may only exist on this implementation of Python and cannot be unpickled in other implementations.' % (obj,), PicklingWarning)
        pickler.save_reduce(_load_type, (_typemap[obj],), obj=obj)
        logger.trace(pickler, "# T1")
    elif obj.__bases__ == (tuple,) and all([hasattr(obj, attr) for attr in ('_fields','_asdict','_make','_replace')]):
        # special case: namedtuples
        logger.trace(pickler, "T6: %s", obj)
        if not obj._field_defaults:
            pickler.save_reduce(_create_namedtuple, (obj.__name__, obj._fields, obj.__module__), obj=obj)
        else:
            # keep defaults in field order so _create_namedtuple can re-apply them
            defaults = [obj._field_defaults[field] for field in obj._fields if field in obj._field_defaults]
            pickler.save_reduce(_create_namedtuple, (obj.__name__, obj._fields, obj.__module__, defaults), obj=obj)
        logger.trace(pickler, "# T6")
        return

    # special cases: NoneType, NotImplementedType, EllipsisType
    elif obj is type(None):
        logger.trace(pickler, "T7: %s", obj)
        #XXX: pickler.save_reduce(type, (None,), obj=obj)
        # write a raw GLOBAL opcode; NoneType is not importable by name in py3
        pickler.write(bytes('c__builtin__\nNoneType\n', 'UTF-8'))
        logger.trace(pickler, "# T7")
    elif obj is NotImplementedType:
        logger.trace(pickler, "T7: %s", obj)
        pickler.save_reduce(type, (NotImplemented,), obj=obj)
        logger.trace(pickler, "# T7")
    elif obj is EllipsisType:
        logger.trace(pickler, "T7: %s", obj)
        pickler.save_reduce(type, (Ellipsis,), obj=obj)
        logger.trace(pickler, "# T7")

    else:
        obj_name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
        _byref = getattr(pickler, '_byref', None)
        # a type already on the postproc stack is being pickled recursively
        obj_recursive = id(obj) in getattr(pickler, '_postproc', ())
        # "incorrectly named": the module/name lookup does not lead back to obj
        incorrectly_named = not _locate_function(obj, pickler)
        if not _byref and not obj_recursive and incorrectly_named: # not a function, but the name was held over
            # thanks to Tom Stepleton pointing out pickler._session unneeded
            logger.trace(pickler, "T2: %s", obj)
            _dict = obj.__dict__.copy() # convert dictproxy to dict
            #print (_dict)
            #print ("%s\n%s" % (type(obj), obj.__name__))
            #print ("%s\n%s" % (obj.__bases__, obj.__dict__))
            # slot descriptors live in __dict__ but are recreated by type();
            # remove them (plus other auto-generated entries) before reducing
            slots = _dict.get('__slots__', ())
            if type(slots) == str: slots = (slots,) # __slots__ accepts a single string
            for name in slots:
                del _dict[name]
            _dict.pop('__dict__', None)
            _dict.pop('__weakref__', None)
            _dict.pop('__prepare__', None)
            if obj_name != obj.__name__:
                # restore __qualname__ after reconstruction (type() only sets __name__)
                if postproc_list is None:
                    postproc_list = []
                postproc_list.append((setattr, (obj, '__qualname__', obj_name)))
            _save_with_postproc(pickler, (_create_type, (
                type(obj), obj.__name__, obj.__bases__, _dict
            )), obj=obj, postproc_list=postproc_list)
            logger.trace(pickler, "# T2")
        else:
            logger.trace(pickler, "T4: %s", obj)
            if incorrectly_named:
                warnings.warn('Cannot locate reference to %r.' % (obj,), PicklingWarning)
            if obj_recursive:
                warnings.warn('Cannot pickle %r: %s.%s has recursive self-references that trigger a RecursionError.' % (obj, obj.__module__, obj_name), PicklingWarning)
            #print (obj.__dict__)
            #print ("%s\n%s" % (type(obj), obj.__name__))
            #print ("%s\n%s" % (obj.__bases__, obj.__dict__))
            StockPickler.save_global(pickler, obj, name=obj_name)
            logger.trace(pickler, "# T4")
    return

1714 

@register(property)
def save_property(pickler, obj):
    """Pickle a property by reducing it to property(fget, fset, fdel, doc)."""
    logger.trace(pickler, "Pr: %s", obj)
    accessors = (obj.fget, obj.fset, obj.fdel, obj.__doc__)
    pickler.save_reduce(property, accessors, obj=obj)
    logger.trace(pickler, "# Pr")

1721 

@register(staticmethod)
@register(classmethod)
def save_classmethod(pickler, obj):
    """Pickle staticmethod/classmethod wrappers by re-wrapping the
    underlying function with the same wrapper type on load."""
    logger.trace(pickler, "Cm: %s", obj)
    wrapped = obj.__func__
    wrapper_type = type(obj)
    pickler.save_reduce(wrapper_type, (wrapped,), obj=obj)
    logger.trace(pickler, "# Cm")

1738 

@register(FunctionType)
def save_function(pickler, obj):
    """Pickle a function.

    Trace tags: F1 functions that cannot be located by name are rebuilt from
    (code, globals, name, defaults, closure) with postproc steps to patch up
    recursion and shared globals; F2 locatable functions are saved as a
    global reference; F3 PyPy builtin functions are saved via the bound
    method that owns them.
    """
    if not _locate_function(obj, pickler):
        if type(obj.__code__) is not CodeType:
            # Some PyPy builtin functions have no module name, and thus are not
            # able to be located
            module_name = getattr(obj, '__module__', None)
            if module_name is None:
                module_name = __builtin__.__name__
            module = _import_module(module_name, safe=True)
            _pypy_builtin = False
            try:
                found, _ = _getattribute(module, obj.__qualname__)
                if getattr(found, '__func__', None) is obj:
                    _pypy_builtin = True
            except AttributeError:
                pass

            if _pypy_builtin:
                logger.trace(pickler, "F3: %s", obj)
                # reduce to getattr(found, '__func__') so the unpickler
                # retrieves the same builtin through its owner
                pickler.save_reduce(getattr, (found, '__func__'), obj=obj)
                logger.trace(pickler, "# F3")
                return

        logger.trace(pickler, "F1: %s", obj)
        _recurse = getattr(pickler, '_recurse', None)
        _postproc = getattr(pickler, '_postproc', None)
        _main_modified = getattr(pickler, '_main_modified', None)
        _original_main = getattr(pickler, '_original_main', __builtin__)#'None'
        postproc_list = []
        if _recurse:
            # recurse to get all globals referred to by obj
            from .detect import globalvars
            globs_copy = globalvars(obj, recurse=True, builtin=True)

            # Add the name of the module to the globs dictionary to prevent
            # the duplication of the dictionary. Pickle the unpopulated
            # globals dictionary and set the remaining items after the function
            # is created to correctly handle recursion.
            globs = {'__name__': obj.__module__}
        else:
            globs_copy = obj.__globals__

            # If the globals is the __dict__ from the module being saved as a
            # session, substitute it by the dictionary being actually saved.
            if _main_modified and globs_copy is _original_main.__dict__:
                globs_copy = getattr(pickler, '_main', _original_main).__dict__
                globs = globs_copy
            # If the globals is a module __dict__, do not save it in the pickle.
            elif globs_copy is not None and obj.__module__ is not None and \
                    getattr(_import_module(obj.__module__, True), '__dict__', None) is globs_copy:
                globs = globs_copy
            else:
                globs = {'__name__': obj.__module__}

        if globs_copy is not None and globs is not globs_copy:
            # In the case that the globals are copied, we need to ensure that
            # the globals dictionary is updated when all objects in the
            # dictionary are already created.
            glob_ids = {id(g) for g in globs_copy.values()}
            for stack_element in _postproc:
                if stack_element in glob_ids:
                    # a global is itself mid-pickle: defer the fill-in of the
                    # globals dict to that object's postproc step
                    _postproc[stack_element].append((_setitems, (globs, globs_copy)))
                    break
            else:
                postproc_list.append((_setitems, (globs, globs_copy)))

        closure = obj.__closure__
        state_dict = {}
        # function attributes that must be restored after creation
        for fattrname in ('__doc__', '__kwdefaults__', '__annotations__'):
            fattr = getattr(obj, fattrname, None)
            if fattr is not None:
                state_dict[fattrname] = fattr
        if obj.__qualname__ != obj.__name__:
            state_dict['__qualname__'] = obj.__qualname__
        if '__name__' not in globs or obj.__module__ != globs['__name__']:
            state_dict['__module__'] = obj.__module__

        state = obj.__dict__
        if type(state) is not dict:
            # non-dict __dict__ (e.g. a proxy) must ride inside state_dict
            state_dict['__dict__'] = state
            state = None
        if state_dict:
            state = state, state_dict

        _save_with_postproc(pickler, (_create_function, (
            obj.__code__, globs, obj.__name__, obj.__defaults__,
            closure
        ), state), obj=obj, postproc_list=postproc_list)

        # Lift closure cell update to earliest function (#458)
        if _postproc:
            topmost_postproc = next(iter(_postproc.values()), None)
            if closure and topmost_postproc:
                for cell in closure:
                    possible_postproc = (setattr, (cell, 'cell_contents', obj))
                    try:
                        topmost_postproc.remove(possible_postproc)
                    except ValueError:
                        continue

                    # Change the value of the cell
                    pickler.save_reduce(*possible_postproc)
                    # pop None created by calling preprocessing step off stack
                    pickler.write(bytes('0', 'UTF-8'))

        logger.trace(pickler, "# F1")
    else:
        logger.trace(pickler, "F2: %s", obj)
        name = getattr(obj, '__qualname__', getattr(obj, '__name__', None))
        StockPickler.save_global(pickler, obj, name=name)
        logger.trace(pickler, "# F2")
    return

1852 

if HAS_CTYPES and hasattr(ctypes, 'pythonapi'):
    # Bind the CPython C-API PyCapsule functions through ctypes, declaring
    # argtypes/restype so pointers round-trip via py_object/c_void_p.
    _PyCapsule_New = ctypes.pythonapi.PyCapsule_New
    _PyCapsule_New.argtypes = (ctypes.c_void_p, ctypes.c_char_p, ctypes.c_void_p)
    _PyCapsule_New.restype = ctypes.py_object
    _PyCapsule_GetPointer = ctypes.pythonapi.PyCapsule_GetPointer
    _PyCapsule_GetPointer.argtypes = (ctypes.py_object, ctypes.c_char_p)
    _PyCapsule_GetPointer.restype = ctypes.c_void_p
    _PyCapsule_GetDestructor = ctypes.pythonapi.PyCapsule_GetDestructor
    _PyCapsule_GetDestructor.argtypes = (ctypes.py_object,)
    _PyCapsule_GetDestructor.restype = ctypes.c_void_p
    _PyCapsule_GetContext = ctypes.pythonapi.PyCapsule_GetContext
    _PyCapsule_GetContext.argtypes = (ctypes.py_object,)
    _PyCapsule_GetContext.restype = ctypes.c_void_p
    _PyCapsule_GetName = ctypes.pythonapi.PyCapsule_GetName
    _PyCapsule_GetName.argtypes = (ctypes.py_object,)
    _PyCapsule_GetName.restype = ctypes.c_char_p
    _PyCapsule_IsValid = ctypes.pythonapi.PyCapsule_IsValid
    _PyCapsule_IsValid.argtypes = (ctypes.py_object, ctypes.c_char_p)
    _PyCapsule_IsValid.restype = ctypes.c_bool
    _PyCapsule_SetContext = ctypes.pythonapi.PyCapsule_SetContext
    _PyCapsule_SetContext.argtypes = (ctypes.py_object, ctypes.c_void_p)
    _PyCapsule_SetDestructor = ctypes.pythonapi.PyCapsule_SetDestructor
    _PyCapsule_SetDestructor.argtypes = (ctypes.py_object, ctypes.c_void_p)
    _PyCapsule_SetName = ctypes.pythonapi.PyCapsule_SetName
    _PyCapsule_SetName.argtypes = (ctypes.py_object, ctypes.c_char_p)
    _PyCapsule_SetPointer = ctypes.pythonapi.PyCapsule_SetPointer
    _PyCapsule_SetPointer.argtypes = (ctypes.py_object, ctypes.c_void_p)
    # Create a throwaway capsule purely to obtain the (otherwise unexposed)
    # PyCapsule type object for handler registration.
    _testcapsule = _PyCapsule_New(
        ctypes.cast(_PyCapsule_New, ctypes.c_void_p),
        ctypes.create_string_buffer(b'dill._dill._testcapsule'),
        None
    )
    PyCapsuleType = type(_testcapsule)
    @register(PyCapsuleType)
    def save_capsule(pickler, obj):
        # Pickle a capsule by its raw pointer/name/context/destructor values;
        # the C data behind the pointer is NOT serialized (hence the warning).
        logger.trace(pickler, "Cap: %s", obj)
        name = _PyCapsule_GetName(obj)
        warnings.warn('Pickling a PyCapsule (%s) does not pickle any C data structures and could cause segmentation faults or other memory errors when unpickling.' % (name,), PicklingWarning)
        pointer = _PyCapsule_GetPointer(obj, name)
        context = _PyCapsule_GetContext(obj)
        destructor = _PyCapsule_GetDestructor(obj)
        pickler.save_reduce(_create_capsule, (pointer, name, context, destructor), obj=obj)
        logger.trace(pickler, "# Cap")
    _incedental_reverse_typemap['PyCapsuleType'] = PyCapsuleType
    _reverse_typemap['PyCapsuleType'] = PyCapsuleType
    _incedental_types.add(PyCapsuleType)
else:
    # no ctypes / no C-API access: capsules are unsupported
    _testcapsule = None

1901 

1902 

#############
# A quick fix for issue #500
# This should be removed when a better solution is found.
# Each handler below pickles a private dataclasses sentinel singleton by
# writing a GLOBAL opcode that reloads that exact singleton from the
# dataclasses module on the unpickling side.

if hasattr(dataclasses, "_HAS_DEFAULT_FACTORY_CLASS"):
    @register(dataclasses._HAS_DEFAULT_FACTORY_CLASS)
    def save_dataclasses_HAS_DEFAULT_FACTORY_CLASS(pickler, obj):
        # sentinel marking a field whose default comes from default_factory
        logger.trace(pickler, "DcHDF: %s", obj)
        pickler.write(GLOBAL + b"dataclasses\n_HAS_DEFAULT_FACTORY\n")
        logger.trace(pickler, "# DcHDF")

if hasattr(dataclasses, "MISSING"):
    @register(type(dataclasses.MISSING))
    def save_dataclasses_MISSING_TYPE(pickler, obj):
        # sentinel meaning "no default was provided"
        logger.trace(pickler, "DcM: %s", obj)
        pickler.write(GLOBAL + b"dataclasses\nMISSING\n")
        logger.trace(pickler, "# DcM")

if hasattr(dataclasses, "KW_ONLY"):
    @register(type(dataclasses.KW_ONLY))
    def save_dataclasses_KW_ONLY_TYPE(pickler, obj):
        # sentinel marking subsequent fields as keyword-only (Python 3.10+)
        logger.trace(pickler, "DcKWO: %s", obj)
        pickler.write(GLOBAL + b"dataclasses\nKW_ONLY\n")
        logger.trace(pickler, "# DcKWO")

if hasattr(dataclasses, "_FIELD_BASE"):
    @register(dataclasses._FIELD_BASE)
    def save_dataclasses_FIELD_BASE(pickler, obj):
        # field-kind sentinels (_FIELD, _FIELD_CLASSVAR, _FIELD_INITVAR);
        # obj.name is the sentinel's attribute name within dataclasses
        logger.trace(pickler, "DcFB: %s", obj)
        pickler.write(GLOBAL + b"dataclasses\n" + obj.name.encode() + b"\n")
        logger.trace(pickler, "# DcFB")

#############

1936 

1937# quick sanity checking 

def pickles(obj,exact=False,safe=False,**kwds):
    """
    Quick check if object pickles with dill.

    If *exact=True* then an equality test is done to check if the reconstructed
    object matches the original object.

    If *safe=True* then any exception will raised in copy signal that the
    object is not picklable, otherwise only pickling errors will be trapped.

    Additional keyword arguments are as :func:`dumps` and :func:`loads`.
    """
    if safe: exceptions = (Exception,) # RuntimeError, ValueError
    else:
        exceptions = (TypeError, AssertionError, NotImplementedError, PicklingError, UnpicklingError)
    try:
        # round-trip through dill; comparison failures below fall back to
        # progressively weaker notions of "same object"
        pik = copy(obj, **kwds)
        #FIXME: should check types match first, then check content if "exact"
        try:
            #FIXME: should be "(pik == obj).all()" for numpy comparison, though that'll fail if shapes differ
            result = bool(pik.all() == obj.all())
        except (AttributeError, TypeError):
            # Suppress comparison warnings locally with a context manager
            # instead of filterwarnings/resetwarnings, which would clobber
            # any warning filters the caller had configured globally.
            with warnings.catch_warnings():
                warnings.simplefilter('ignore')
                result = pik == obj
        if hasattr(result, 'toarray'): # for unusual types like sparse matrix
            result = result.toarray().all()
        if result: return True
        if not exact:
            # weaker check: same concrete type
            result = type(pik) == type(obj)
            if result: return result
            # class instances might have been dumped with byref=False
            return repr(type(pik)) == repr(type(obj)) #XXX: InstanceType?
        return False
    except exceptions:
        return False

1974 

def check(obj, *args, **kwds):
    """
    Check pickling of an object across another process.

    *python* is the path to the python interpreter (defaults to sys.executable)

    Set *verbose=True* to print the unpickled object in the other process.

    Additional keyword arguments are as :func:`dumps` and :func:`loads`.

    Raises:
        TypeError: if *python* is not a string.
    """
    # == undocumented ==
    # python -- the string path or executable name of the selected python
    # verbose -- if True, be verbose about printing warning messages
    # all other args and kwds are passed to dill.dumps
    verbose = kwds.pop('verbose', False)
    python = kwds.pop('python', None)
    # 'ignore' is an Unpickler setting: remove it before calling dumps and
    # forward it to the loads call in the child process instead
    ignore = kwds.pop('ignore', None)
    if python is None:
        python = sys.executable
    # type check (the original called isinstance and discarded the result)
    if not isinstance(python, str):
        raise TypeError('python must be a string, not %r' % type(python).__name__)
    import subprocess
    fail = True
    try:
        _obj = dumps(obj, *args, **kwds)
        fail = False
    finally:
        if fail and verbose:
            print("DUMP FAILED")
    # Build an argv list (never a whitespace-split command string) so that
    # interpreter paths containing spaces work correctly.
    unpickle = "dill.loads(%s, ignore=%s)" % (repr(_obj), repr(ignore))
    cmd = [python, "-c", "import dill; print(%s)" % unpickle]
    msg = "SUCCESS" if not subprocess.call(cmd) else "LOAD FAILED"
    if verbose:
        print(msg)
    return

2015 

2016# use to protect against missing attributes 

def is_dill(pickler, child=None):
    "check the dill-ness of your pickler"
    shallow = child is False or not hasattr(pickler.__class__, 'mro')
    if shallow:
        # cheap check: was the pickler defined in a dill module?
        return 'dill' in pickler.__module__
    # full check: is dill's Pickler anywhere in the class hierarchy?
    return Pickler in pickler.__class__.mro()

2022 

def _extend():
    """extend pickle with all of dill's registered types"""
    # need to have pickle not choke on _main_module? use is_dill(pickler)
    for t,func in Pickler.dispatch.items():
        try:
            StockPickler.dispatch[t] = func
        except Exception: #TypeError, PicklingError, UnpicklingError
            # BUG FIX: the original referenced an undefined name ('pickler')
            # here, which would raise NameError if this branch ever executed;
            # log through the adapter's standard interface instead.
            logger.info("skip: %s", t)
    return

2032 

del diff, _use_diff, use_diff  # drop module-setup temporaries from the public namespace

2034 

2035# EOF