Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/jsonpickle/unpickler.py: 16%

458 statements  

coverage.py v7.2.7, created at 2023-06-07 06:20 +0000

# Copyright (C) 2008 John Paulett (john -at- paulett.org)
# Copyright (C) 2009-2018 David Aguilar (davvid -at- gmail.com)
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
from __future__ import absolute_import, division, unicode_literals

import dataclasses
import sys
import warnings

from . import compat, errors, handlers, tags, util
from .backend import json
from .compat import numeric_types


def decode(
    string,
    backend=None,
    context=None,
    keys=False,
    reset=True,
    safe=False,
    classes=None,
    v1_decode=False,
    on_missing="ignore",
):
    """Convert a JSON string into a Python object.

    The keyword argument 'keys' defaults to False.
    If set to True then jsonpickle will decode non-string dictionary keys
    into python objects via the jsonpickle protocol.

    The keyword argument 'classes' defaults to None.
    If set to a single class, or a sequence (list, set, tuple) of classes,
    then the classes will be made available when constructing objects.
    If set to a dictionary of class names to class objects, the class object
    will be provided to jsonpickle to deserialize the class name into.
    This can be used to give jsonpickle access to local classes that are not
    available through the global module import scope, and the dict method can
    be used to deserialize encoded objects into a new class.

    The keyword argument 'safe' defaults to False.
    If set to True, eval() is avoided, but backwards-compatible
    (pre-0.7.0) deserialization of repr-serialized objects is disabled.

    The keyword argument 'backend' defaults to None.
    If set to an instance of jsonpickle.backend.JSONBackend, jsonpickle
    will use that backend for deserialization.

    The keyword argument 'v1_decode' defaults to False.
    If set to True it enables you to decode objects serialized in jsonpickle v1.
    Please do not attempt to re-encode the objects in the v1 format! Version 2's
    format fixes issue #255, and allows dictionary identity to be preserved
    through an encode/decode cycle.

    The keyword argument 'on_missing' defaults to 'ignore'.
    If set to 'error', it will raise an error if the class it's decoding is not
    found. If set to 'warn', it will warn you in said case. If set to a
    non-awaitable function, it will call said callback function with the class
    name (a string) as the only parameter. Strings passed to on_missing are
    lowercased automatically.


    >>> decode('"my string"') == 'my string'
    True
    >>> decode('36')
    36
    """

    if isinstance(on_missing, str):
        on_missing = on_missing.lower()
    elif not util.is_function(on_missing):
        warnings.warn(
            "Unpickler.on_missing must be a string or a function! It will be ignored!"
        )

    backend = backend or json
    context = context or Unpickler(
        keys=keys,
        backend=backend,
        safe=safe,
        v1_decode=v1_decode,
        on_missing=on_missing,
    )
    data = backend.decode(string)
    return context.restore(data, reset=reset, classes=classes)
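
# Illustrative sketch, not part of the upstream jsonpickle source: a small,
# never-called helper showing typical decode() usage. It assumes the standard
# 'json://' key prefix (tags.JSON_KEY) that encode(..., keys=True) produces.
def _example_decode_usage():
    """Hedged usage sketch for decode(); safe to remove."""
    # Plain JSON values come back unchanged.
    assert decode('"my string"') == 'my string'
    assert decode('[1, 2, 3]') == [1, 2, 3]
    # With keys=True, dictionary keys carrying the tags.JSON_KEY prefix are
    # themselves decoded back into Python objects (here, the int 1).
    assert decode('{"json://1": "one"}', keys=True) == {1: 'one'}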


def _safe_hasattr(obj, attr):
    """Workaround unreliable hasattr() availability on sqlalchemy objects"""
    try:
        object.__getattribute__(obj, attr)
        return True
    except AttributeError:
        return False


def _is_json_key(key):
    """Is this key a special object that has been encoded to JSON?"""
    return isinstance(key, compat.string_types) and key.startswith(tags.JSON_KEY)


class _Proxy(object):
    """Proxies are dummy objects that are later replaced by real instances

    The `restore()` function has to solve a tricky problem when pickling
    objects with cyclical references -- the parent instance does not yet
    exist.

    The problem is that `__getnewargs__()`, `__getstate__()`, custom handlers,
    and cyclical object graphs are allowed to reference the yet-to-be-created
    object via the referencing machinery.

    In other words, objects are allowed to depend on themselves for
    construction!

    We solve this problem by placing dummy Proxy objects into the referencing
    machinery so that we can construct the child objects before constructing
    the parent. Objects are initially created with Proxy attribute values
    instead of real references.

    We collect all objects that contain references to proxies and run
    a final sweep over them to swap in the real instance. This is done
    at the very end of the top-level `restore()`.

    The `instance` attribute below is replaced with the real instance
    after `__new__()` has been used to construct the object and is used
    when swapping proxies with real instances.

    """

    def __init__(self):
        self.instance = None

    def get(self):
        return self.instance

    def reset(self, instance):
        self.instance = instance


class _IDProxy(_Proxy):
    def __init__(self, objs, index):
        self._index = index
        self._objs = objs

    def get(self):
        return self._objs[self._index]


def _obj_setattr(obj, attr, proxy):
    setattr(obj, attr, proxy.get())


def _obj_setvalue(obj, idx, proxy):
    obj[idx] = proxy.get()
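
# Illustrative sketch, not part of the upstream jsonpickle source: a tiny,
# never-called helper showing how a _Proxy placeholder is later swapped for
# the real instance by one of the setter callbacks above.
def _example_proxy_swap():
    """Hedged sketch of the proxy-swapping sweep; safe to remove."""
    proxy = _Proxy()
    container = {'child': proxy}  # the placeholder is referenced first
    real = object()
    proxy.reset(real)  # the real instance becomes available later
    # The final sweep in Unpickler._swap_proxies() invokes entries like this:
    _obj_setvalue(container, 'child', proxy)
    assert container['child'] is real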


def loadclass(module_and_name, classes=None):
    """Loads the module and returns the class.

    >>> cls = loadclass('datetime.datetime')
    >>> cls.__name__
    'datetime'

    >>> loadclass('does.not.exist')

    >>> loadclass('builtins.int')()
    0

    """
    # Check if the class exists in a caller-provided scope
    if classes:
        try:
            return classes[module_and_name]
        except KeyError:
            # maybe they didn't provide a fully qualified path
            try:
                return classes[module_and_name.rsplit('.', 1)[-1]]
            except KeyError:
                pass
    # Otherwise, load classes from globally-accessible imports
    names = module_and_name.split('.')
    # First assume that everything up to the last dot is the module name,
    # then try other splits to handle classes that are defined within
    # classes
    for up_to in range(len(names) - 1, 0, -1):
        module = util.untranslate_module_name('.'.join(names[:up_to]))
        try:
            __import__(module)
            obj = sys.modules[module]
            for class_name in names[up_to:]:
                try:
                    obj = getattr(obj, class_name)
                except AttributeError:
                    continue
            return obj
        except (AttributeError, ImportError, ValueError):
            continue
    return None
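
# Illustrative sketch, not part of the upstream jsonpickle source: a small,
# never-called helper showing the two lookup paths in loadclass().
def _example_loadclass_usage():
    """Hedged usage sketch for loadclass(); safe to remove."""
    import collections

    # Globally importable names are resolved module-by-module, which also
    # covers classes nested inside other classes.
    assert loadclass('collections.OrderedDict') is collections.OrderedDict
    # Unknown names return None rather than raising.
    assert loadclass('does.not.exist') is None

    # A caller-provided mapping is consulted first; a bare class name is
    # accepted when the fully qualified key is not present.
    class LocalThing:
        pass

    found = loadclass('some.module.LocalThing', classes={'LocalThing': LocalThing})
    assert found is LocalThing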


def has_tag(obj, tag):
    """Helper function that tests to see if the obj is a dictionary
    and contains a particular key/tag.

    >>> obj = {'test': 1}
    >>> has_tag(obj, 'test')
    True
    >>> has_tag(obj, 'fail')
    False

    >>> has_tag(42, 'fail')
    False

    """
    return type(obj) is dict and tag in obj


def getargs(obj, classes=None):
    """Return arguments suitable for __new__()"""
    # Let saved newargs take precedence over everything
    if has_tag(obj, tags.NEWARGSEX):
        raise ValueError("__newargs_ex__ returns both args and kwargs")

    if has_tag(obj, tags.NEWARGS):
        return obj[tags.NEWARGS]

    if has_tag(obj, tags.INITARGS):
        return obj[tags.INITARGS]

    try:
        seq_list = obj[tags.SEQ]
        obj_dict = obj[tags.OBJECT]
    except KeyError:
        return []
    typeref = loadclass(obj_dict, classes=classes)
    if not typeref:
        return []
    if hasattr(typeref, '_fields'):
        if len(typeref._fields) == len(seq_list):
            return seq_list
    return []
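
# Illustrative sketch, not part of the upstream jsonpickle source: a small,
# never-called helper showing what getargs() extracts from a tagged payload.
# The namedtuple and the 'example.Point' name are invented for illustration.
def _example_getargs_usage():
    """Hedged usage sketch for getargs(); safe to remove."""
    import collections

    # Saved __getnewargs__ values win over everything else.
    assert getargs({tags.NEWARGS: [1, 2]}) == [1, 2]

    # For namedtuples, the py/seq payload is only used when its length
    # matches the type's _fields; the locally created type is supplied
    # through the optional 'classes' mapping because it is not importable.
    Point = collections.namedtuple('Point', 'x y')
    payload = {tags.OBJECT: 'example.Point', tags.SEQ: [3, 4]}
    assert getargs(payload, classes={'example.Point': Point}) == [3, 4]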


class _trivialclassic:
    """
    A trivial class that can be instantiated with no args
    """


def make_blank_classic(cls):
    """
    Implement the mandated strategy for dealing with classic classes
    which cannot be instantiated without __getinitargs__ because they
    take parameters
    """
    instance = _trivialclassic()
    instance.__class__ = cls
    return instance
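
# Illustrative sketch, not part of the upstream jsonpickle source: a tiny,
# never-called helper showing the effect of make_blank_classic() -- an
# instance of the target class is produced without calling its __init__.
def _example_make_blank_classic():
    """Hedged sketch for make_blank_classic(); safe to remove."""

    class NeedsArgs:
        def __init__(self, required):
            self.required = required

    blank = make_blank_classic(NeedsArgs)
    assert isinstance(blank, NeedsArgs)
    assert not hasattr(blank, 'required')  # __init__ was never invoked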


def loadrepr(reprstr):
    """Returns an instance of the object from the object's repr() string.
    This involves dynamically evaluating code with eval().

    >>> obj = loadrepr('datetime/datetime.datetime.now()')
    >>> obj.__class__.__name__
    'datetime'

    """
    module, evalstr = reprstr.split('/')
    mylocals = locals()
    localname = module
    if '.' in localname:
        localname = module.split('.', 1)[0]
    mylocals[localname] = __import__(module)
    return eval(evalstr)


def has_tag_dict(obj, tag):
    """Helper function that tests to see if the obj is a dictionary
    and contains a particular key/tag.

    >>> obj = {'test': 1}
    >>> has_tag_dict(obj, 'test')
    True
    >>> has_tag_dict(obj, 'fail')
    False

    """
    return tag in obj


class Unpickler(object):
    def __init__(
        self, backend=None, keys=False, safe=False, v1_decode=False, on_missing="ignore"
    ):
        self.backend = backend or json
        self.keys = keys
        self.safe = safe
        self.v1_decode = v1_decode
        self.on_missing = on_missing

        self.reset()

    def reset(self):
        """Resets the object's internal state."""
        # Map reference names to object instances
        self._namedict = {}

        # The stack of names traversed for child objects
        self._namestack = []

        # Map of objects to their index in the _objs list
        self._obj_to_idx = {}
        self._objs = []
        self._proxies = []

        # Extra local classes not accessible globally
        self._classes = {}

    def _swap_proxies(self):
        """Replace proxies with their corresponding instances"""
        for obj, attr, proxy, method in self._proxies:
            method(obj, attr, proxy)
        self._proxies = []

    def _restore(self, obj):
        # If obj isn't one of these types, neither it nor anything inside it
        # can carry a tag.
        # Don't change the tuple of types to a set; it won't work with isinstance.
        if not isinstance(obj, (str, list, dict, set, tuple)):

            def restore(x):
                return x

        else:
            restore = self._restore_tags(obj)
        return restore(obj)

    def restore(self, obj, reset=True, classes=None):
        """Restores a flattened object to its original python state.

        Simply returns any of the basic builtin types

        >>> u = Unpickler()
        >>> u.restore('hello world') == 'hello world'
        True
        >>> u.restore({'key': 'value'}) == {'key': 'value'}
        True

        """
        if reset:
            self.reset()
        if classes:
            self.register_classes(classes)
        value = self._restore(obj)
        if reset:
            self._swap_proxies()
        return value


    def register_classes(self, classes):
        """Register one or more classes

        :param classes: sequence of classes or a single class to register

        """
        if isinstance(classes, (list, tuple, set)):
            for cls in classes:
                self.register_classes(cls)
        elif isinstance(classes, dict):
            for cls in classes.values():
                self.register_classes(cls)
        else:
            self._classes[util.importable_name(classes)] = classes

    def _restore_base64(self, obj):
        return util.b64decode(obj[tags.B64].encode('utf-8'))

    def _restore_base85(self, obj):
        return util.b85decode(obj[tags.B85].encode('utf-8'))

    def _refname(self):
        """Calculates the name of the current location in the JSON stack.

        This is called as jsonpickle traverses the object structure to
        create references to previously-traversed objects. This allows
        cyclical data structures such as doubly-linked lists.
        jsonpickle ensures that duplicate python references to the same
        object result in only a single JSON object definition and
        special reference tags to represent each reference.

        >>> u = Unpickler()
        >>> u._namestack = []
        >>> u._refname() == '/'
        True
        >>> u._namestack = ['a']
        >>> u._refname() == '/a'
        True
        >>> u._namestack = ['a', 'b']
        >>> u._refname() == '/a/b'
        True

        """
        return '/' + '/'.join(self._namestack)

    def _mkref(self, obj):
        obj_id = id(obj)
        try:
            self._obj_to_idx[obj_id]
        except KeyError:
            self._obj_to_idx[obj_id] = len(self._objs)
            self._objs.append(obj)
            # Backwards compatibility: old versions of jsonpickle
            # produced "py/ref" references.
            self._namedict[self._refname()] = obj
        return obj

    def _restore_list(self, obj):
        parent = []
        self._mkref(parent)
        children = [self._restore(v) for v in obj]
        parent.extend(children)
        method = _obj_setvalue
        proxies = [
            (parent, idx, value, method)
            for idx, value in enumerate(parent)
            if isinstance(value, _Proxy)
        ]
        self._proxies.extend(proxies)
        return parent

    def _restore_iterator(self, obj):
        return iter(self._restore_list(obj[tags.ITERATOR]))

    def _swapref(self, proxy, instance):
        proxy_id = id(proxy)
        instance_id = id(instance)

        instance_index = self._obj_to_idx[proxy_id]
        self._obj_to_idx[instance_id] = instance_index
        del self._obj_to_idx[proxy_id]

        self._objs[instance_index] = instance
        self._namedict[self._refname()] = instance

    def _restore_reduce(self, obj):
        """
        Supports restoring with all elements of __reduce__ as per pep 307.
        Assumes that iterator items (the last two) are represented as lists
        as per pickler implementation.
        """
        proxy = _Proxy()
        self._mkref(proxy)
        reduce_val = list(map(self._restore, obj[tags.REDUCE]))
        if len(reduce_val) < 5:
            reduce_val.extend([None] * (5 - len(reduce_val)))
        f, args, state, listitems, dictitems = reduce_val

        if f == tags.NEWOBJ or getattr(f, '__name__', '') == '__newobj__':
            # mandated special case
            cls = args[0]
            if not isinstance(cls, type):
                cls = self._restore(cls)
            stage1 = cls.__new__(cls, *args[1:])
        else:
            stage1 = f(*args)

        if state:
            try:
                stage1.__setstate__(state)
            except AttributeError:
                # it's fine - we'll try the prescribed default methods
                try:
                    # we can't do a straight update here because we
                    # need object identity of the state dict to be
                    # preserved so that _swap_proxies works out
                    for k, v in stage1.__dict__.items():
                        state.setdefault(k, v)
                    stage1.__dict__ = state
                except AttributeError:
                    # next prescribed default
                    try:
                        for k, v in state.items():
                            setattr(stage1, k, v)
                    except Exception:
                        dict_state, slots_state = state
                        if dict_state:
                            stage1.__dict__.update(dict_state)
                        if slots_state:
                            for k, v in slots_state.items():
                                setattr(stage1, k, v)

        if listitems:
            # should be lists if not None
            try:
                stage1.extend(listitems)
            except AttributeError:
                for x in listitems:
                    stage1.append(x)

        if dictitems:
            for k, v in dictitems:
                stage1.__setitem__(k, v)

        proxy.reset(stage1)
        self._swapref(proxy, stage1)
        return stage1


    def _restore_id(self, obj):
        try:
            idx = obj[tags.ID]
            return self._objs[idx]
        except IndexError:
            return _IDProxy(self._objs, idx)

    def _restore_type(self, obj):
        typeref = loadclass(obj[tags.TYPE], classes=self._classes)
        if typeref is None:
            return obj
        return typeref

    def _restore_repr(self, obj):
        if self.safe:
            # eval() is not allowed in safe mode
            return None
        obj = loadrepr(obj[tags.REPR])
        return self._mkref(obj)

    def _loadfactory(self, obj):
        try:
            default_factory = obj['default_factory']
        except KeyError:
            return None
        del obj['default_factory']
        return self._restore(default_factory)

    def _process_missing(self, class_name):
        # most common case comes first
        if self.on_missing == 'ignore':
            pass
        elif self.on_missing == 'warn':
            warnings.warn("Unpickler._restore_object could not find %s!" % class_name)
        elif self.on_missing == 'error':
            raise errors.ClassNotFoundError(
                "Unpickler.restore_object could not find %s!" % class_name
            )
        elif util.is_function(self.on_missing):
            self.on_missing(class_name)

    def _restore_pickled_key(self, key):
        """Restore a possibly pickled key"""
        if _is_json_key(key):
            key = decode(
                key[len(tags.JSON_KEY) :],
                backend=self.backend,
                context=self,
                keys=True,
                reset=False,
            )
        return key

    def _restore_key_fn(self):
        """Return a callable that restores keys

        This function is responsible for restoring non-string keys
        when we are decoding with `keys=True`.

        """
        # This function is called before entering a tight loop
        # where the returned function will be called.
        # We return a specific function after checking self.keys
        # instead of doing so in the body of the function to
        # avoid conditional branching inside a tight loop.
        if self.keys:
            restore_key = self._restore_pickled_key
        else:

            def restore_key(key):
                return key

        return restore_key

    def _restore_from_dict(self, obj, instance, ignorereserved=True):
        restore_key = self._restore_key_fn()
        method = _obj_setattr
        deferred = {}

        for k, v in util.items(obj):
            # ignore the reserved attribute
            if ignorereserved and k in tags.RESERVED:
                continue
            if isinstance(k, numeric_types):
                str_k = k.__str__()
            else:
                str_k = k
            self._namestack.append(str_k)
            k = restore_key(k)
            # step into the namespace
            value = self._restore(v)
            if util.is_noncomplex(instance) or util.is_dictionary_subclass(instance):
                try:
                    if k == '__dict__':
                        setattr(instance, k, value)
                    else:
                        instance[k] = value
                except TypeError:
                    # Immutable object, must be constructed in one shot
                    if k != '__dict__':
                        deferred[k] = value
                        self._namestack.pop()
                        continue
            else:
                if not k.startswith('__'):
                    try:
                        setattr(instance, k, value)
                    except KeyError:
                        # certain numpy objects require us to prepend a _ to the var
                        # this should go in the np handler but I think this could be
                        # useful for other code
                        setattr(instance, f"_{k}", value)
                    except dataclasses.FrozenInstanceError:
                        # Issue #240: this appears to be the only way to set
                        # attributes on a frozen dataclass instance.
                        object.__setattr__(instance, k, value)
                    except AttributeError as e:
                        # some objects may raise this for read-only attributes (#422)
                        if (
                            hasattr(instance, "__slots__")
                            and not len(instance.__slots__)
                            and issubclass(instance.__class__, int)
                        ):
                            continue
                        raise e
                else:
                    setattr(instance, f"_{instance.__class__.__name__}{k}", value)

            # This instance has an instance variable named `k` that is
            # currently a proxy and must be replaced
            if isinstance(value, _Proxy):
                self._proxies.append((instance, k, value, method))

            # step out
            self._namestack.pop()

        if deferred:
            # SQLAlchemy Immutable mappings must be constructed in one shot
            instance = instance.__class__(deferred)

        return instance


    def _restore_state(self, obj, instance):
        state = self._restore(obj[tags.STATE])
        has_slots = (
            isinstance(state, tuple) and len(state) == 2 and isinstance(state[1], dict)
        )
        has_slots_and_dict = has_slots and isinstance(state[0], dict)
        if hasattr(instance, '__setstate__'):
            instance.__setstate__(state)
        elif isinstance(state, dict):
            # implements described default handling
            # of state for object with instance dict
            # and no slots
            instance = self._restore_from_dict(state, instance, ignorereserved=False)
        elif has_slots:
            instance = self._restore_from_dict(state[1], instance, ignorereserved=False)
            if has_slots_and_dict:
                instance = self._restore_from_dict(
                    state[0], instance, ignorereserved=False
                )
        elif not hasattr(instance, '__getnewargs__') and not hasattr(
            instance, '__getnewargs_ex__'
        ):
            # __setstate__ is not implemented so that means that the best
            # we can do is return the result of __getstate__() rather than
            # return an empty shell of an object.
            # However, if there were newargs, it's not an empty shell
            instance = state
        return instance

    def _restore_object_instance_variables(self, obj, instance):
        instance = self._restore_from_dict(obj, instance)

        # Handle list and set subclasses
        if has_tag(obj, tags.SEQ):
            if hasattr(instance, 'append'):
                for v in obj[tags.SEQ]:
                    instance.append(self._restore(v))
            elif hasattr(instance, 'add'):
                for v in obj[tags.SEQ]:
                    instance.add(self._restore(v))

        if has_tag(obj, tags.STATE):
            instance = self._restore_state(obj, instance)

        return instance

    def _restore_object_instance(self, obj, cls, class_name=""):
        # This is a placeholder proxy object which allows child objects to
        # reference the parent object before it has been instantiated.
        proxy = _Proxy()
        self._mkref(proxy)

        # An object can install itself as its own factory, so load the factory
        # after the instance is available for referencing.
        factory = self._loadfactory(obj)

        if has_tag(obj, tags.NEWARGSEX):
            args, kwargs = obj[tags.NEWARGSEX]
        else:
            args = getargs(obj, classes=self._classes)
            kwargs = {}
        if args:
            args = self._restore(args)
        if kwargs:
            kwargs = self._restore(kwargs)

        is_oldstyle = not (isinstance(cls, type) or getattr(cls, '__meta__', None))
        try:
            if (not is_oldstyle) and hasattr(cls, '__new__'):
                # new style classes
                if factory:
                    instance = cls.__new__(cls, factory, *args, **kwargs)
                    instance.default_factory = factory
                else:
                    instance = cls.__new__(cls, *args, **kwargs)
            else:
                instance = object.__new__(cls)
        except TypeError:  # old-style classes
            is_oldstyle = True

        if is_oldstyle:
            try:
                instance = cls(*args)
            except TypeError:  # fail gracefully
                try:
                    instance = make_blank_classic(cls)
                except Exception:  # fail gracefully
                    self._process_missing(class_name)
                    return self._mkref(obj)

        proxy.reset(instance)
        self._swapref(proxy, instance)

        if isinstance(instance, tuple):
            return instance

        instance = self._restore_object_instance_variables(obj, instance)

        if _safe_hasattr(instance, 'default_factory') and isinstance(
            instance.default_factory, _Proxy
        ):
            instance.default_factory = instance.default_factory.get()

        return instance

    def _restore_object(self, obj):
        class_name = obj[tags.OBJECT]
        cls = loadclass(class_name, classes=self._classes)
        handler = handlers.get(cls, handlers.get(class_name))
        if handler is not None:  # custom handler
            proxy = _Proxy()
            self._mkref(proxy)
            instance = handler(self).restore(obj)
            proxy.reset(instance)
            self._swapref(proxy, instance)
            return instance

        if cls is None:
            return self._mkref(obj)

        return self._restore_object_instance(obj, cls, class_name)

    def _restore_function(self, obj):
        return loadclass(obj[tags.FUNCTION], classes=self._classes)

    def _restore_set(self, obj):
        return {self._restore(v) for v in obj[tags.SET]}

    def _restore_dict(self, obj):
        data = {}
        if not self.v1_decode:
            self._mkref(data)

        # If we are decoding dicts that can have non-string keys then we
        # need to do a two-phase decode where the non-string keys are
        # processed last. This ensures a deterministic order when
        # assigning object IDs for references.
        if self.keys:
            # Phase 1: regular non-special keys.
            for k, v in util.items(obj):
                if _is_json_key(k):
                    continue
                if isinstance(k, numeric_types):
                    str_k = k.__str__()
                else:
                    str_k = k
                self._namestack.append(str_k)
                data[k] = self._restore(v)

                self._namestack.pop()

            # Phase 2: object keys only.
            for k, v in util.items(obj):
                if not _is_json_key(k):
                    continue
                self._namestack.append(k)

                k = self._restore_pickled_key(k)
                data[k] = result = self._restore(v)
                # k is currently a proxy and must be replaced
                if isinstance(result, _Proxy):
                    self._proxies.append((data, k, result, _obj_setvalue))

                self._namestack.pop()
        else:
            # No special keys, thus we don't need to restore the keys either.
            for k, v in util.items(obj):
                if isinstance(k, numeric_types):
                    str_k = k.__str__()
                else:
                    str_k = k
                self._namestack.append(str_k)
                data[k] = self._restore(v)
                self._namestack.pop()
        return data

    def _restore_tuple(self, obj):
        return tuple([self._restore(v) for v in obj[tags.TUPLE]])

    def _restore_tags(self, obj):
        try:
            if not tags.RESERVED <= set(obj) and not type(obj) in (list, dict):

                def restore(x):
                    return x

                return restore
        except TypeError:
            pass
        if type(obj) is dict:
            if tags.TUPLE in obj:
                restore = self._restore_tuple
            elif tags.SET in obj:
                restore = self._restore_set
            elif tags.B64 in obj:
                restore = self._restore_base64
            elif tags.B85 in obj:
                restore = self._restore_base85
            elif tags.ID in obj:
                restore = self._restore_id
            elif tags.ITERATOR in obj:
                restore = self._restore_iterator
            elif tags.OBJECT in obj:
                restore = self._restore_object
            elif tags.TYPE in obj:
                restore = self._restore_type
            elif tags.REDUCE in obj:
                restore = self._restore_reduce
            elif tags.FUNCTION in obj:
                restore = self._restore_function
            elif tags.REPR in obj:  # Backwards compatibility
                restore = self._restore_repr
            else:
                restore = self._restore_dict
        elif util.is_list(obj):
            restore = self._restore_list
        else:

            def restore(x):
                return x

        return restore
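
# Illustrative sketch, not part of the upstream jsonpickle source: a small,
# never-called helper showing how Unpickler.restore() resolves py/id
# back-references against objects it has already seen in the same pass.
def _example_unpickler_usage():
    """Hedged usage sketch for Unpickler; safe to remove."""
    u = Unpickler()
    assert u.restore('hello world') == 'hello world'
    # The outer list is registered first (index 0) and the first dict second
    # (index 1), so a {tags.ID: 1} entry resolves to that same dict object.
    restored = u.restore([{'value': 1}, {tags.ID: 1}])
    assert restored[1] is restored[0]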