Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/jsonpickle/unpickler.py: 69%

Shortcuts on this page

r m x   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

524 statements  

1# Copyright (C) 2008 John Paulett (john -at- paulett.org) 

2# Copyright (C) 2009-2024 David Aguilar (davvid -at- gmail.com) 

3# All rights reserved. 

4# 

5# This software is licensed as described in the file COPYING, which 

6# you should have received as part of this distribution. 

7import dataclasses 

8import sys 

9import warnings 

10from types import ModuleType 

11from typing import ( 

12 Any, 

13 Callable, 

14 Dict, 

15 Iterator, 

16 List, 

17 Optional, 

18 Sequence, 

19 Set, 

20 Tuple, 

21 Type, 

22 Union, 

23) 

24 

25from . import errors, handlers, tags, util 

26from .backend import JSONBackend, json 

27 

# Maps class names to class objects: a single class, a dict of name -> class,
# or a sequence of classes that callers may supply to `decode()`/`restore()`.
ClassesType = Optional[Union[Type[Any], Dict[str, Type[Any]], Sequence[Type[Any]]]]
# Handler for missing classes: either a policy name ("ignore"/"warn"/"error")
# or a callback invoked with the missing class name as its only argument.
MissingHandler = Union[str, Callable[[str], Any]]

32 

33 

def decode(
    string: str,
    backend: Optional[JSONBackend] = None,
    # we get a lot of errors when typing with TypeVar
    context: Optional["Unpickler"] = None,
    keys: bool = False,
    reset: bool = True,
    safe: bool = True,
    classes: Optional[ClassesType] = None,
    v1_decode: bool = False,
    on_missing: MissingHandler = "ignore",
    handle_readonly: bool = False,
) -> Any:
    """Convert a JSON string into a Python object.

    :param backend: If set to an instance of jsonpickle.backend.JSONBackend, jsonpickle
        will use that backend for deserialization.

    :param context: Supply a pre-built Pickler or Unpickler object to the
        `jsonpickle.encode` and `jsonpickle.decode` machinery instead
        of creating a new instance. The `context` represents the currently
        active Pickler and Unpickler objects when custom handlers are
        invoked by jsonpickle.

    :param keys: If set to True then jsonpickle will decode non-string dictionary keys
        into python objects via the jsonpickle protocol.

    :param reset: Custom pickle handlers that use the `Pickler.flatten` method or
        `jsonpickle.encode` function must call `encode` with `reset=False`
        in order to retain object references during pickling.
        This flag is not typically used outside of a custom handler or
        `__getstate__` implementation.

    :param safe: If set to ``False``, use of ``eval()`` for backwards-compatible (pre-0.7.0)
        deserialization of repr-serialized objects is enabled. Defaults to ``True``.
        The default value was ``False`` in jsonpickle v3 and changed to ``True`` in jsonpickle v4.

    .. warning::

        ``eval()`` is used when set to ``False`` and is not secure against
        malicious inputs. You should avoid setting ``safe=False``.

    :param classes: If set to a single class, or a sequence (list, set, tuple) of
        classes, then the classes will be made available when constructing objects.
        If set to a dictionary of class names to class objects, the class object
        will be provided to jsonpickle to deserialize the class name into.
        This can be used to give jsonpickle access to local classes that are not
        available through the global module import scope, and the dict method can
        be used to deserialize encoded objects into a new class.

    :param v1_decode: If set to True it enables you to decode objects serialized in
        jsonpickle v1. Please do not attempt to re-encode the objects in the v1 format!
        Version 2's format fixes issue #255, and allows dictionary identity to be
        preserved through an encode/decode cycle.

    :param on_missing: If set to 'error', it will raise an error if the class it's
        decoding is not found. If set to 'warn', it will warn you in said case.
        If set to a non-awaitable function, it will call said callback function
        with the class name (a string) as the only parameter. Strings passed to
        `on_missing` are lowercased automatically.

    :param handle_readonly: If set to True, the Unpickler will handle objects encoded
        with 'handle_readonly' properly. Do not set this flag for objects not encoded
        with 'handle_readonly' set to True.


    >>> decode('"my string"') == 'my string'
    True
    >>> decode('36')
    36
    """
    # Policy names are case-insensitive; anything else must be callable.
    if isinstance(on_missing, str):
        on_missing = on_missing.lower()
    elif not util._is_function(on_missing):
        warnings.warn(
            "Unpickler.on_missing must be a string or a function! It will be ignored!"
        )

    backend = backend or json
    is_ephemeral_context = context is None
    if context:
        unpickler = context
    else:
        unpickler = Unpickler(
            keys=keys,
            backend=backend,
            safe=safe,
            v1_decode=v1_decode,
            on_missing=on_missing,
            handle_readonly=handle_readonly,
        )
    data = backend.decode(string)
    result = unpickler.restore(data, reset=reset, classes=classes)
    if is_ephemeral_context:
        # Avoid holding onto references to external objects, which can
        # prevent garbage collection from occurring.
        unpickler.reset()
    return result

130 

131 

132def _safe_hasattr(obj: Any, attr: str) -> bool: 

133 """Workaround unreliable hasattr() availability on sqlalchemy objects""" 

134 try: 

135 object.__getattribute__(obj, attr) 

136 return True 

137 except AttributeError: 

138 return False 

139 

140 

def _is_json_key(key: Any) -> bool:
    """Has this key a special object that has been encoded to JSON?"""
    if not isinstance(key, str):
        return False
    return key.startswith(tags.JSON_KEY)

144 

145 

146class _Proxy: 

147 """Proxies are dummy objects that are later replaced by real instances 

148 

149 The `restore()` function has to solve a tricky problem when pickling 

150 objects with cyclical references -- the parent instance does not yet 

151 exist. 

152 

153 The problem is that `__getnewargs__()`, `__getstate__()`, custom handlers, 

154 and cyclical objects graphs are allowed to reference the yet-to-be-created 

155 object via the referencing machinery. 

156 

157 In other words, objects are allowed to depend on themselves for 

158 construction! 

159 

160 We solve this problem by placing dummy Proxy objects into the referencing 

161 machinery so that we can construct the child objects before constructing 

162 the parent. Objects are initially created with Proxy attribute values 

163 instead of real references. 

164 

165 We collect all objects that contain references to proxies and run 

166 a final sweep over them to swap in the real instance. This is done 

167 at the very end of the top-level `restore()`. 

168 

169 The `instance` attribute below is replaced with the real instance 

170 after `__new__()` has been used to construct the object and is used 

171 when swapping proxies with real instances. 

172 

173 """ 

174 

175 def __init__(self) -> None: 

176 self.instance = None 

177 

178 def get(self) -> Any: 

179 return self.instance 

180 

181 def reset(self, instance: Any) -> None: 

182 self.instance = instance 

183 

184 

class _IDProxy(_Proxy):
    """Proxy that resolves to a position in a shared object list.

    Used for forward ``py/id`` references: the target list entry may not
    exist yet when the proxy is created, so resolution is deferred to
    `get()`.
    """

    def __init__(self, objs: List[Any], index: int) -> None:
        self._objs = objs
        self._index = index

    def get(self) -> Any:
        """Return the referenced object, or None if the slot is absent."""
        try:
            return self._objs[self._index]
        except IndexError:
            return None

195 

196 

def _obj_setattr(obj: Any, attr: str, proxy: _Proxy) -> None:
    """Use setattr to update a proxy entry"""
    value = proxy.get()
    setattr(obj, attr, value)

200 

201 

def _obj_setvalue(obj: Any, idx: Any, proxy: _Proxy) -> None:
    """Use obj[key] assignments to update a proxy entry"""
    value = proxy.get()
    obj[idx] = value

205 

206 

def loadclass(
    module_and_name: str, classes: Optional[Dict[str, Type[Any]]] = None
) -> Optional[Union[Type[Any], ModuleType]]:
    """Loads the module and returns the class.

    >>> cls = loadclass('datetime.datetime')
    >>> cls.__name__
    'datetime'

    >>> loadclass('does.not.exist')

    >>> loadclass('builtins.int')()
    0

    """
    # Check if the class exists in a caller-provided scope
    if classes:
        if module_and_name in classes:
            return classes[module_and_name]
        # maybe they didn't provide a fully qualified path
        short_name = module_and_name.rsplit(".", 1)[-1]
        if short_name in classes:
            return classes[short_name]
    # Otherwise, load classes from globally-accessible imports
    parts = module_and_name.split(".")
    # First assume that everything up to the last dot is the module name,
    # then try other splits to handle classes that are defined within
    # classes
    for split_at in range(len(parts) - 1, 0, -1):
        module_path = util.untranslate_module_name(".".join(parts[:split_at]))
        try:
            __import__(module_path)
            target = sys.modules[module_path]
            for attr_name in parts[split_at:]:
                target = getattr(target, attr_name)
        except (AttributeError, ImportError, ValueError):
            continue
        return target
    # NoneType is a special case and can not be imported/created
    if module_and_name == "builtins.NoneType":
        return type(None)
    return None

251 

252 

def has_tag(obj: Any, tag: str) -> bool:
    """Return True when `obj` is a dict that contains the key `tag`.

    Non-dict objects (including dict subclasses) never match.

    >>> obj = {'test': 1}
    >>> has_tag(obj, 'test')
    True
    >>> has_tag(obj, 'fail')
    False

    >>> has_tag(42, 'fail')
    False

    """
    if type(obj) is not dict:
        return False
    return tag in obj

268 

269 

def getargs(obj: Dict[str, Any], classes: Optional[Dict[str, Type[Any]]] = None) -> Any:
    """Return arguments suitable for __new__()

    Raises ValueError when both args and kwargs are present (NEWARGSEX),
    and returns an empty list when no usable arguments can be found.
    """
    # Let saved newargs take precedence over everything
    if has_tag(obj, tags.NEWARGSEX):
        raise ValueError("__newargs_ex__ returns both args and kwargs")

    for arg_tag in (tags.NEWARGS, tags.INITARGS):
        if has_tag(obj, arg_tag):
            return obj[arg_tag]

    try:
        seq_list = obj[tags.SEQ]
        obj_dict = obj[tags.OBJECT]
    except KeyError:
        return []
    typeref = loadclass(obj_dict, classes=classes)
    if not typeref:
        return []
    # namedtuple-like types: only use the sequence when it matches the fields
    if hasattr(typeref, "_fields") and len(typeref._fields) == len(seq_list):
        return seq_list
    return []

294 

295 

296class _trivialclassic: 

297 """ 

298 A trivial class that can be instantiated with no args 

299 """ 

300 

301 

def make_blank_classic(cls: Type[Any]) -> Any:
    """
    Implement the mandated strategy for dealing with classic classes
    which cannot be instantiated without __getinitargs__ because they
    take parameters.

    Builds a trivial shell object and rebinds its ``__class__`` so that
    ``cls.__init__`` is never invoked.
    """
    shell = _trivialclassic()
    shell.__class__ = cls
    return shell

311 

312 

def loadrepr(reprstr: str) -> Any:
    """Returns an instance of the object from the object's repr() string.
    It involves the dynamic specification of code.

    .. warning::

        This function is unsafe and uses `eval()`.

    >>> obj = loadrepr('datetime/datetime.datetime.now()')
    >>> obj.__class__.__name__
    'datetime'

    """
    module, evalstr = reprstr.split("/")
    scope = locals()
    # Bind the top-level package under its bare name so the expression
    # can reference it (e.g. "datetime.datetime.now()").
    localname = module.split(".", 1)[0] if "." in module else module
    scope[localname] = __import__(module)
    return eval(evalstr, scope)

333 

334 

335def _loadmodule(module_str: str) -> Optional[Any]: 

336 """Returns a reference to a module. 

337 

338 >>> fn = _loadmodule('datetime/datetime.datetime.fromtimestamp') 

339 >>> fn.__name__ 

340 'fromtimestamp' 

341 

342 """ 

343 module, identifier = module_str.split("/") 

344 try: 

345 result = __import__(module) 

346 except ImportError: 

347 return None 

348 identifier_parts = identifier.split(".") 

349 first_identifier = identifier_parts[0] 

350 if first_identifier != module and not module.startswith(f"{first_identifier}."): 

351 return None 

352 for name in identifier_parts[1:]: 

353 try: 

354 result = getattr(result, name) 

355 except AttributeError: 

356 return None 

357 return result 

358 

359 

def has_tag_dict(obj: Any, tag: str) -> bool:
    """Test whether `obj` contains the key/tag `tag`.

    Unlike :func:`has_tag`, this helper assumes `obj` already supports
    membership tests (e.g. it is a dict) and performs no type check, so
    passing an object without ``__contains__``/iteration support raises
    ``TypeError``.

    >>> obj = {'test': 1}
    >>> has_tag_dict(obj, 'test')
    True
    >>> has_tag_dict(obj, 'fail')
    False

    """
    # NOTE: the previous docstring was copy-pasted from has_tag() and
    # falsely claimed non-dict inputs return False; `tag in obj` does not
    # guard against them.
    return tag in obj

375 

376 

377def _passthrough(value: Any) -> Any: 

378 """A function that returns its input as-is""" 

379 return value 

380 

381 

class Unpickler:
    """Restores JSON-compatible payloads produced by the Pickler.

    The unpickler walks decoded JSON data and rebuilds Python objects
    from the special ``py/*`` tag dictionaries. Internal state tracks
    every restored object so that shared and cyclical references are
    reconstructed with correct object identity; `_Proxy` placeholders
    are used for objects that are referenced before they exist and are
    swapped for real instances at the end of the top-level `restore()`.
    """

    def __init__(
        self,
        backend: Optional[JSONBackend] = None,
        keys: bool = False,
        safe: bool = True,
        v1_decode: bool = False,
        on_missing: MissingHandler = "ignore",
        handle_readonly: bool = False,
    ) -> None:
        # See `decode()` for the meaning of each option.
        self.backend = backend or json
        self.keys = keys
        self.safe = safe
        self.v1_decode = v1_decode
        self.on_missing = on_missing
        self.handle_readonly = handle_readonly

        self.reset()

    def reset(self) -> None:
        """Resets the object's internal state."""
        # Map reference names to object instances
        self._namedict = {}

        # The stack of names traversed for child objects
        self._namestack = []

        # Map of objects to their index in the _objs list
        self._obj_to_idx = {}
        self._objs = []
        self._proxies = []

        # Extra local classes not accessible globally
        self._classes = {}

    def _swap_proxies(self) -> None:
        """Replace proxies with their corresponding instances"""
        for obj, attr, proxy, method in self._proxies:
            method(obj, attr, proxy)
        self._proxies = []

    def _restore(
        self, obj: Any, _passthrough: Callable[[Any], Any] = _passthrough
    ) -> Any:
        """Dispatch `obj` to the appropriate restoration function."""
        # if obj isn't in these types, neither it nor nothing in it can have a tag
        # don't change the tuple of types to a set, it won't work with isinstance
        if not isinstance(obj, (str, list, dict, set, tuple)):
            restore = _passthrough
        else:
            restore = self._restore_tags(obj)
        return restore(obj)

    def restore(
        self, obj: Any, reset: bool = True, classes: Optional[ClassesType] = None
    ) -> Any:
        """Restores a flattened object to its original python state.

        Simply returns any of the basic builtin types

        >>> u = Unpickler()
        >>> u.restore('hello world') == 'hello world'
        True
        >>> u.restore({'key': 'value'}) == {'key': 'value'}
        True

        """
        if reset:
            self.reset()
        if classes:
            self.register_classes(classes)
        value = self._restore(obj)
        if reset:
            # Final sweep: replace proxy placeholders with real instances.
            self._swap_proxies()
        return value

    def register_classes(self, classes: ClassesType) -> None:
        """Register one or more classes

        :param classes: sequence of classes or a single class to register

        """
        if isinstance(classes, (list, tuple, set)):
            for cls in classes:
                self.register_classes(cls)
        elif isinstance(classes, dict):
            # Normalize dict keys to fully qualified class-name strings.
            self._classes.update(
                (
                    cls if isinstance(cls, str) else util.importable_name(cls),
                    handler,
                )
                for cls, handler in classes.items()
            )
        else:
            self._classes[util.importable_name(classes)] = classes  # type: ignore[arg-type]

    def _restore_base64(self, obj: Dict[str, Any]) -> bytes:
        """Decode a tags.B64 payload; returns b'' when the payload is malformed."""
        try:
            return util.b64decode(obj[tags.B64].encode("utf-8"))
        except (AttributeError, UnicodeEncodeError):
            return b""

    def _restore_base85(self, obj: Dict[str, Any]) -> bytes:
        """Decode a tags.B85 payload; returns b'' when the payload is malformed."""
        try:
            return util.b85decode(obj[tags.B85].encode("utf-8"))
        except (AttributeError, UnicodeEncodeError):
            return b""

    def _refname(self) -> str:
        """Calculates the name of the current location in the JSON stack.

        This is called as jsonpickle traverses the object structure to
        create references to previously-traversed objects. This allows
        cyclical data structures such as doubly-linked lists.
        jsonpickle ensures that duplicate python references to the same
        object results in only a single JSON object definition and
        special reference tags to represent each reference.

        >>> u = Unpickler()
        >>> u._namestack = []
        >>> u._refname() == '/'
        True
        >>> u._namestack = ['a']
        >>> u._refname() == '/a'
        True
        >>> u._namestack = ['a', 'b']
        >>> u._refname() == '/a/b'
        True

        """
        return "/" + "/".join(self._namestack)

    def _mkref(self, obj: Any) -> Any:
        """Register `obj` for py/id reference tracking and return it unchanged."""
        obj_id = id(obj)
        try:
            _ = self._obj_to_idx[obj_id]
        except KeyError:
            self._obj_to_idx[obj_id] = len(self._objs)
            self._objs.append(obj)
            # Backwards compatibility: old versions of jsonpickle
            # produced "py/ref" references.
            self._namedict[self._refname()] = obj
        return obj

    def _restore_list(self, obj: List[Any]) -> List[Any]:
        """Restore a JSON list, recording any proxies left by cyclical children."""
        parent = []
        # Register the list before restoring children so py/id references
        # to it resolve during the child restores.
        self._mkref(parent)
        children = [self._restore(v) for v in obj]
        parent.extend(children)
        method = _obj_setvalue
        proxies = [
            (parent, idx, value, method)
            for idx, value in enumerate(parent)
            if isinstance(value, _Proxy)
        ]
        self._proxies.extend(proxies)
        return parent

    def _restore_iterator(self, obj: Dict[str, Any]) -> Iterator[Any]:
        """Restore a tags.ITERATOR payload; yields nothing on malformed input."""
        try:
            return iter(self._restore_list(obj[tags.ITERATOR]))
        except TypeError:
            return iter([])

    def _swapref(self, proxy: _Proxy, instance: Any) -> None:
        """Replace `proxy` with `instance` in the reference-tracking tables."""
        proxy_id = id(proxy)
        instance_id = id(instance)

        instance_index = self._obj_to_idx[proxy_id]
        self._obj_to_idx[instance_id] = instance_index
        del self._obj_to_idx[proxy_id]

        self._objs[instance_index] = instance
        self._namedict[self._refname()] = instance

    def _restore_reduce(self, obj: Dict[str, Any]) -> Any:
        """
        Supports restoring with all elements of __reduce__ as per pep 307.
        Assumes that iterator items (the last two) are represented as lists
        as per pickler implementation.
        """
        proxy = _Proxy()
        self._mkref(proxy)
        try:
            reduce_val = list(map(self._restore, obj[tags.REDUCE]))
        except TypeError:
            # Malformed payload: resolve the proxy to an empty list.
            result = []
            proxy.reset(result)
            self._swapref(proxy, result)
            return result
        # Pad the tuple to the full 5-element reduce form.
        if len(reduce_val) < 5:
            reduce_val.extend([None] * (5 - len(reduce_val)))
        f, args, state, listitems, dictitems = reduce_val

        if f == tags.NEWOBJ or getattr(f, "__name__", "") == "__newobj__":
            # mandated special case
            cls = args[0]
            if not isinstance(cls, type):
                cls = self._restore(cls)
            stage1 = cls.__new__(cls, *args[1:])
        else:
            if not callable(f):
                # Cannot invoke the constructor; fall back to an empty list.
                result = []
                proxy.reset(result)
                self._swapref(proxy, result)
                return result
            try:
                stage1 = f(*args)
            except TypeError:
                # this happens when there are missing kwargs and args don't match so we bypass
                # __init__ since the state dict will set all attributes immediately afterwards
                stage1 = f.__new__(f, *args)

        if state:
            try:
                stage1.__setstate__(state)
            except AttributeError:
                # it's fine - we'll try the prescribed default methods
                try:
                    # we can't do a straight update here because we
                    # need object identity of the state dict to be
                    # preserved so that _swap_proxies works out
                    for k, v in stage1.__dict__.items():
                        state.setdefault(k, v)
                    stage1.__dict__ = state
                except AttributeError:
                    # next prescribed default
                    try:
                        for k, v in state.items():
                            setattr(stage1, k, v)
                    except Exception:
                        # Last resort: (dict_state, slots_state) two-tuple form.
                        dict_state, slots_state = state
                        if dict_state:
                            stage1.__dict__.update(dict_state)
                        if slots_state:
                            for k, v in slots_state.items():
                                setattr(stage1, k, v)

        if listitems:
            # should be lists if not None
            try:
                stage1.extend(listitems)
            except AttributeError:
                for x in listitems:
                    stage1.append(x)

        if dictitems:
            for k, v in dictitems:
                stage1.__setitem__(k, v)

        proxy.reset(stage1)
        self._swapref(proxy, stage1)
        return stage1

    def _restore_id(self, obj: Dict[str, Any]) -> Any:
        """Resolve a tags.ID back-reference to a previously restored object."""
        try:
            idx = obj[tags.ID]
            return self._objs[idx]
        except IndexError:
            # Forward reference: the target has not been restored yet,
            # so hand back a proxy resolved lazily against self._objs.
            return _IDProxy(self._objs, idx)
        except TypeError:
            return None

    def _restore_type(self, obj: Dict[str, Any]) -> Any:
        """Resolve a tags.TYPE payload to a class; returns `obj` when not found."""
        typeref = loadclass(obj[tags.TYPE], classes=self._classes)
        if typeref is None:
            return obj
        return typeref

    def _restore_module(self, obj: Dict[str, Any]) -> Any:
        """Resolve a tags.MODULE payload to a module attribute reference."""
        new_obj = _loadmodule(obj[tags.MODULE])
        return self._mkref(new_obj)

    def _restore_repr_safe(self, obj: Dict[str, Any]) -> Any:
        """Resolve a tags.REPR payload without eval() (used when safe=True)."""
        new_obj = _loadmodule(obj[tags.REPR])
        return self._mkref(new_obj)

    def _restore_repr(self, obj: Dict[str, Any]) -> Any:
        """Resolve a tags.REPR payload via eval() (used when safe=False; unsafe)."""
        obj = loadrepr(obj[tags.REPR])
        return self._mkref(obj)

    def _loadfactory(self, obj: Dict[str, Any]) -> Optional[Any]:
        """Pop and restore an embedded 'default_factory' entry, if present."""
        try:
            default_factory = obj["default_factory"]
        except KeyError:
            return None
        del obj["default_factory"]
        return self._restore(default_factory)

    def _process_missing(self, class_name: str) -> None:
        """Apply the on_missing policy for an unresolvable class name."""
        # most common case comes first
        if self.on_missing == "ignore":
            pass
        elif self.on_missing == "warn":
            warnings.warn("Unpickler._restore_object could not find %s!" % class_name)
        elif self.on_missing == "error":
            raise errors.ClassNotFoundError(
                "Unpickler.restore_object could not find %s!" % class_name  # type: ignore[arg-type]
            )
        elif util._is_function(self.on_missing):
            self.on_missing(class_name)  # type: ignore[operator]

    def _restore_pickled_key(self, key: str) -> Any:
        """Restore a possibly pickled key"""
        if _is_json_key(key):
            key = decode(
                key[len(tags.JSON_KEY) :],
                backend=self.backend,
                context=self,
                keys=True,
                reset=False,
            )
        return key

    def _restore_key_fn(
        self, _passthrough: Callable[[Any], Any] = _passthrough
    ) -> Callable[[Any], Any]:
        """Return a callable that restores keys

        This function is responsible for restoring non-string keys
        when we are decoding with `keys=True`.

        """
        # This function is called before entering a tight loop
        # where the returned function will be called.
        # We return a specific function after checking self.keys
        # instead of doing so in the body of the function to
        # avoid conditional branching inside a tight loop.
        if self.keys:
            restore_key = self._restore_pickled_key
        else:
            restore_key = _passthrough  # type: ignore[assignment]
        return restore_key

    def _restore_from_dict(
        self,
        obj: Dict[str, Any],
        instance: Any,
        ignorereserved: bool = True,
        restore_dict_items: bool = True,
    ) -> Any:
        """Populate `instance` with the key/value pairs found in `obj`.

        :param ignorereserved: skip keys in tags.RESERVED (the py/* markers).
        :param restore_dict_items: recursively restore keys and values before
            assignment; when False the raw values are assigned as-is.
        """
        restore_key = self._restore_key_fn()
        method = _obj_setattr
        deferred = {}

        for k, v in util.items(obj):
            # ignore the reserved attribute
            if ignorereserved and k in tags.RESERVED:
                continue
            if isinstance(k, (int, float)):
                str_k = k.__str__()
            else:
                str_k = k
            self._namestack.append(str_k)
            if restore_dict_items:
                k = restore_key(k)
                # step into the namespace
                value = self._restore(v)
            else:
                value = v
            if util._is_noncomplex(instance) or util._is_dictionary_subclass(instance):
                try:
                    if k == "__dict__":
                        setattr(instance, k, value)
                    else:
                        instance[k] = value
                except TypeError:
                    # Immutable object, must be constructed in one shot
                    if k != "__dict__":
                        deferred[k] = value
                    self._namestack.pop()
                    continue
            else:
                if not k.startswith("__"):
                    try:
                        setattr(instance, k, value)
                    except KeyError:
                        # certain numpy objects require us to prepend a _ to the var
                        # this should go in the np handler but I think this could be
                        # useful for other code
                        setattr(instance, f"_{k}", value)
                    except dataclasses.FrozenInstanceError:
                        # issue #240
                        # i think this is the only way to set frozen dataclass attrs
                        object.__setattr__(instance, k, value)
                    except AttributeError as e:
                        # some objects raise this for read-only attributes (#422) (#478)
                        if (
                            hasattr(instance, "__slots__")
                            and not len(instance.__slots__)
                            # we have to handle this separately because of +483
                            and issubclass(instance.__class__, (int, str))
                            and self.handle_readonly
                        ):
                            continue
                        raise e
                else:
                    # Dunder-prefixed keys get Python-style name mangling.
                    setattr(instance, f"_{instance.__class__.__name__}{k}", value)

            # This instance has an instance variable named `k` that is
            # currently a proxy and must be replaced
            if isinstance(value, _Proxy):
                self._proxies.append((instance, k, value, method))

            # step out
            self._namestack.pop()

        if deferred:
            # SQLAlchemy Immutable mappings must be constructed in one shot
            instance = instance.__class__(deferred)

        return instance

    def _restore_state(self, obj: Dict[str, Any], instance: Any) -> Any:
        """Apply a tags.STATE payload to `instance`, honoring __setstate__."""
        state = self._restore(obj[tags.STATE])
        # A (dict, dict) two-tuple signals the __slots__ state convention.
        has_slots = (
            isinstance(state, tuple) and len(state) == 2 and isinstance(state[1], dict)
        )
        has_slots_and_dict = has_slots and isinstance(state[0], dict)
        if hasattr(instance, "__setstate__"):
            instance.__setstate__(state)
        elif isinstance(state, dict):
            # implements described default handling
            # of state for object with instance dict
            # and no slots
            instance = self._restore_from_dict(
                state, instance, ignorereserved=False, restore_dict_items=False
            )
        elif has_slots:
            instance = self._restore_from_dict(
                state[1], instance, ignorereserved=False, restore_dict_items=False
            )
            if has_slots_and_dict:
                instance = self._restore_from_dict(
                    state[0], instance, ignorereserved=False, restore_dict_items=False
                )
        elif not hasattr(instance, "__getnewargs__") and not hasattr(
            instance, "__getnewargs_ex__"
        ):
            # __setstate__ is not implemented so that means that the best
            # we can do is return the result of __getstate__() rather than
            # return an empty shell of an object.
            # However, if there were newargs, it's not an empty shell
            instance = state
        return instance

    def _restore_object_instance_variables(
        self, obj: Dict[str, Any], instance: Any
    ) -> Any:
        """Fill in attributes, sequence items, and state for `instance`."""
        instance = self._restore_from_dict(obj, instance)

        # Handle list and set subclasses
        if has_tag(obj, tags.SEQ):
            if hasattr(instance, "append"):
                for v in obj[tags.SEQ]:
                    instance.append(self._restore(v))
            elif hasattr(instance, "add"):
                for v in obj[tags.SEQ]:
                    instance.add(self._restore(v))

        if has_tag(obj, tags.STATE):
            instance = self._restore_state(obj, instance)

        return instance

    def _restore_object_instance(
        self, obj: Dict[str, Any], cls: Type[Any], class_name: str = ""
    ) -> Any:
        """Instantiate `cls` from the payload `obj` and restore its state."""
        # This is a placeholder proxy object which allows child objects to
        # reference the parent object before it has been instantiated.
        proxy = _Proxy()
        self._mkref(proxy)

        # An object can install itself as its own factory, so load the factory
        # after the instance is available for referencing.
        factory = self._loadfactory(obj)

        if has_tag(obj, tags.NEWARGSEX):
            args, kwargs = obj[tags.NEWARGSEX]
        else:
            args = getargs(obj, classes=self._classes)
            kwargs = {}
        if args:
            args = self._restore(args)
        if kwargs:
            kwargs = self._restore(kwargs)

        is_oldstyle = not (isinstance(cls, type) or getattr(cls, "__meta__", None))
        try:
            if not is_oldstyle and hasattr(cls, "__new__"):
                # new style classes
                if factory:
                    instance = cls.__new__(cls, factory, *args, **kwargs)
                    instance.default_factory = factory
                else:
                    instance = cls.__new__(cls, *args, **kwargs)
            else:
                instance = object.__new__(cls)
        except TypeError:  # old-style classes
            is_oldstyle = True

        if is_oldstyle:
            try:
                instance = cls(*args)
            except TypeError:  # fail gracefully
                try:
                    instance = make_blank_classic(cls)
                except Exception:  # fail gracefully
                    self._process_missing(class_name)
                    return self._mkref(obj)

        proxy.reset(instance)
        self._swapref(proxy, instance)

        if isinstance(instance, tuple):
            # Tuples are immutable; no further state can be applied.
            return instance

        instance = self._restore_object_instance_variables(obj, instance)

        if _safe_hasattr(instance, "default_factory") and isinstance(
            instance.default_factory, _Proxy
        ):
            instance.default_factory = instance.default_factory.get()

        return instance

    def _restore_object(self, obj: Dict[str, Any]) -> Any:
        """Restore a tags.OBJECT payload, dispatching to custom handlers first."""
        class_name = obj[tags.OBJECT]
        cls = loadclass(class_name, classes=self._classes)
        handler = handlers.get(cls, handlers.get(class_name))  # type: ignore[arg-type]
        if handler is not None:  # custom handler
            proxy = _Proxy()
            self._mkref(proxy)
            instance = handler(self).restore(obj)
            proxy.reset(instance)
            self._swapref(proxy, instance)
            return instance

        if cls is None:
            self._process_missing(class_name)
            return self._mkref(obj)

        return self._restore_object_instance(obj, cls, class_name)  # type: ignore[arg-type]

    def _restore_function(self, obj: Dict[str, Any]) -> Any:
        """Resolve a tags.FUNCTION payload to the referenced function object."""
        return loadclass(obj[tags.FUNCTION], classes=self._classes)

    def _restore_set(self, obj: Dict[str, Any]) -> Set[Any]:
        """Restore a tags.SET payload; returns an empty set on malformed input."""
        try:
            return {self._restore(v) for v in obj[tags.SET]}
        except TypeError:
            return set()

    def _restore_dict(self, obj: Dict[str, Any]) -> Dict[str, Any]:
        """Restore a plain JSON dict, handling encoded non-string keys."""
        data = {}
        # v1 payloads did not track dict identity; skip registration there.
        if not self.v1_decode:
            self._mkref(data)

        # If we are decoding dicts that can have non-string keys then we
        # need to do a two-phase decode where the non-string keys are
        # processed last. This ensures a deterministic order when
        # assigning object IDs for references.
        if self.keys:
            # Phase 1: regular non-special keys.
            for k, v in util.items(obj):
                if _is_json_key(k):
                    continue
                if isinstance(k, (int, float)):
                    str_k = k.__str__()
                else:
                    str_k = k
                self._namestack.append(str_k)
                data[k] = self._restore(v)

                self._namestack.pop()

            # Phase 2: object keys only.
            for k, v in util.items(obj):
                if not _is_json_key(k):
                    continue
                self._namestack.append(k)

                k = self._restore_pickled_key(k)
                data[k] = result = self._restore(v)
                # k is currently a proxy and must be replaced
                if isinstance(result, _Proxy):
                    self._proxies.append((data, k, result, _obj_setvalue))

                self._namestack.pop()
        else:
            # No special keys, thus we don't need to restore the keys either.
            for k, v in util.items(obj):
                if isinstance(k, (int, float)):
                    str_k = k.__str__()
                else:
                    str_k = k
                self._namestack.append(str_k)
                data[k] = result = self._restore(v)
                if isinstance(result, _Proxy):
                    self._proxies.append((data, k, result, _obj_setvalue))
                self._namestack.pop()
        return data

    def _restore_tuple(self, obj: Dict[str, Any]) -> Tuple[Any, ...]:
        """Restore a tags.TUPLE payload; returns () on malformed input."""
        try:
            return tuple(self._restore(v) for v in obj[tags.TUPLE])
        except TypeError:
            return ()

    def _restore_tags(
        self, obj: Any, _passthrough: Callable[[Any], Any] = _passthrough
    ) -> Callable[[Any], Any]:
        """Return the restoration function for the specified object"""
        try:
            # Fast path: no reserved tag present and not a container that
            # could hold tagged children.
            if not tags.RESERVED <= set(obj) and type(obj) not in (list, dict):
                return _passthrough
        except TypeError:
            pass
        if type(obj) is dict:
            if tags.TUPLE in obj:
                restore = self._restore_tuple
            elif tags.SET in obj:
                restore = self._restore_set  # type: ignore[assignment]
            elif tags.B64 in obj:
                restore = self._restore_base64  # type: ignore[assignment]
            elif tags.B85 in obj:
                restore = self._restore_base85  # type: ignore[assignment]
            elif tags.ID in obj:
                restore = self._restore_id
            elif tags.ITERATOR in obj:
                restore = self._restore_iterator  # type: ignore[assignment]
            elif tags.OBJECT in obj:
                restore = self._restore_object
            elif tags.TYPE in obj:
                restore = self._restore_type
            elif tags.REDUCE in obj:
                restore = self._restore_reduce
            elif tags.FUNCTION in obj:
                restore = self._restore_function
            elif tags.MODULE in obj:
                restore = self._restore_module
            elif tags.REPR in obj:
                # safe=True resolves via module lookup; safe=False uses eval().
                if self.safe:
                    restore = self._restore_repr_safe
                else:
                    restore = self._restore_repr
            else:
                restore = self._restore_dict  # type: ignore[assignment]
        elif type(obj) is list:
            restore = self._restore_list  # type: ignore[assignment]
        else:
            restore = _passthrough  # type: ignore[assignment]
        return restore