Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/jsonpickle/unpickler.py: 69%

Shortcuts on this page

r m x   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

503 statements  

1# Copyright (C) 2008 John Paulett (john -at- paulett.org) 

2# Copyright (C) 2009-2024 David Aguilar (davvid -at- gmail.com) 

3# All rights reserved. 

4# 

5# This software is licensed as described in the file COPYING, which 

6# you should have received as part of this distribution. 

7import dataclasses 

8import warnings 

9from typing import ( 

10 Any, 

11 Callable, 

12 Dict, 

13 Iterator, 

14 List, 

15 Optional, 

16 Sequence, 

17 Set, 

18 Tuple, 

19 Type, 

20 Union, 

21) 

22 

23from . import errors, handlers, tags, util 

24from .backend import JSONBackend, json 

25 

# Maps class names to class objects; alternatively a single class or a
# sequence of classes may be supplied and the names are derived from them.
ClassesType = Optional[Union[Type[Any], Dict[str, Type[Any]], Sequence[Type[Any]]]]
# Handler for missing classes: either a policy name ("ignore", "warn",
# "error") or a callback invoked with the missing class name.
MissingHandler = Union[str, Callable[[str], Any]]

30 

31 

def decode(
    string: str,
    backend: Optional[JSONBackend] = None,
    # we get a lot of errors when typing with TypeVar
    context: Optional["Unpickler"] = None,
    keys: bool = False,
    reset: bool = True,
    safe: bool = True,
    classes: Optional[ClassesType] = None,
    v1_decode: bool = False,
    on_missing: MissingHandler = "ignore",
    handle_readonly: bool = False,
) -> Any:
    """Convert a JSON string into a Python object.

    :param backend: An instance of jsonpickle.backend.JSONBackend to use for
        deserialization instead of the default backend.

    :param context: A pre-built Unpickler to use instead of creating a fresh
        one.  The `context` represents the currently active Pickler and
        Unpickler objects when custom handlers are invoked by jsonpickle.

    :param keys: When True, non-string dictionary keys are decoded back into
        python objects via the jsonpickle protocol.

    :param reset: Custom pickle handlers that re-enter `jsonpickle.decode`
        (or use `Pickler.flatten`) must pass ``reset=False`` so that object
        references are retained across calls.  This flag is not typically
        used outside of a custom handler or `__getstate__` implementation.

    :param safe: When set to ``False``, ``eval()`` is used to restore
        repr-serialized objects (the pre-0.7.0 format).  Defaults to
        ``True``.  The default was ``False`` in jsonpickle v3 and changed
        to ``True`` in jsonpickle v4.

        .. warning::

            ``eval()`` is used when set to ``False`` and is not secure
            against malicious inputs.  You should avoid setting
            ``safe=False``.

    :param classes: A single class, a sequence (list, set, tuple) of
        classes, or a dictionary of class names to class objects that should
        be available when constructing objects.  This gives jsonpickle
        access to local classes that are not reachable via the global module
        import scope, and the dict form can redirect an encoded class name
        to a different class.

    :param v1_decode: When True, objects serialized in the jsonpickle v1
        format can be decoded.  Do not re-encode the objects in the v1
        format!  Version 2's format fixes issue #255 and preserves
        dictionary identity through an encode/decode cycle.

    :param on_missing: Policy for classes that cannot be found: 'error'
        raises, 'warn' emits a warning, and a non-awaitable callable is
        invoked with the class name (a string) as its only argument.
        Strings passed to `on_missing` are lowercased automatically.

    :param handle_readonly: Set to True only for payloads that were encoded
        with ``handle_readonly=True``; do not set it otherwise.


    >>> decode('"my string"') == 'my string'
    True
    >>> decode('36')
    36
    """

    if isinstance(on_missing, str):
        on_missing = on_missing.lower()
    elif not util._is_function(on_missing):
        warnings.warn(
            "Unpickler.on_missing must be a string or a function! It will be ignored!"
        )

    active_backend = backend or json
    # A context created here exists only for this call and is reset afterwards.
    ephemeral = context is None
    unpickler = context or Unpickler(
        keys=keys,
        backend=active_backend,
        safe=safe,
        v1_decode=v1_decode,
        on_missing=on_missing,
        handle_readonly=handle_readonly,
    )
    result = unpickler.restore(
        active_backend.decode(string), reset=reset, classes=classes
    )
    if ephemeral:
        # Drop references held by the throwaway context so that external
        # objects remain garbage-collectible.
        unpickler.reset()
    return result

128 

129 

130def _safe_hasattr(obj: Any, attr: str) -> bool: 

131 """Workaround unreliable hasattr() availability on sqlalchemy objects""" 

132 try: 

133 object.__getattribute__(obj, attr) 

134 return True 

135 except AttributeError: 

136 return False 

137 

138 

def _is_json_key(key: Any) -> bool:
    """True when `key` is a string carrying a jsonpickle-encoded object."""
    if not isinstance(key, str):
        return False
    return key.startswith(tags.JSON_KEY)

142 

143 

144class _Proxy: 

145 """Proxies are dummy objects that are later replaced by real instances 

146 

147 The `restore()` function has to solve a tricky problem when pickling 

148 objects with cyclical references -- the parent instance does not yet 

149 exist. 

150 

151 The problem is that `__getnewargs__()`, `__getstate__()`, custom handlers, 

152 and cyclical objects graphs are allowed to reference the yet-to-be-created 

153 object via the referencing machinery. 

154 

155 In other words, objects are allowed to depend on themselves for 

156 construction! 

157 

158 We solve this problem by placing dummy Proxy objects into the referencing 

159 machinery so that we can construct the child objects before constructing 

160 the parent. Objects are initially created with Proxy attribute values 

161 instead of real references. 

162 

163 We collect all objects that contain references to proxies and run 

164 a final sweep over them to swap in the real instance. This is done 

165 at the very end of the top-level `restore()`. 

166 

167 The `instance` attribute below is replaced with the real instance 

168 after `__new__()` has been used to construct the object and is used 

169 when swapping proxies with real instances. 

170 

171 """ 

172 

173 def __init__(self) -> None: 

174 self.instance = None 

175 

176 def get(self) -> Any: 

177 return self.instance 

178 

179 def reset(self, instance: Any) -> None: 

180 self.instance = instance 

181 

182 

183class _IDProxy(_Proxy): 

184 def __init__(self, objs: List[Any], index: int) -> None: 

185 self._index = index 

186 self._objs = objs 

187 

188 def get(self) -> Any: 

189 try: 

190 return self._objs[self._index] 

191 except IndexError: 

192 return None 

193 

194 

def _obj_setattr(obj: Any, attr: str, proxy: _Proxy) -> None:
    """Swap a resolved proxy into place via attribute assignment."""
    setattr(obj, attr, proxy.get())

198 

199 

def _obj_setvalue(obj: Any, idx: Any, proxy: _Proxy) -> None:
    """Swap a resolved proxy into place via an ``obj[key]`` assignment."""
    obj[idx] = proxy.get()

203 

204 

def has_tag(obj: Any, tag: str) -> bool:
    """Return True when `obj` is a plain dict containing the key `tag`.

    Dict subclasses deliberately do not match (``type(obj) is dict``).

    >>> obj = {'test': 1}
    >>> has_tag(obj, 'test')
    True
    >>> has_tag(obj, 'fail')
    False

    >>> has_tag(42, 'fail')
    False

    """
    if type(obj) is not dict:
        return False
    return tag in obj

220 

221 

def getargs(obj: Dict[str, Any], classes: Optional[Dict[str, Type[Any]]] = None) -> Any:
    """Return arguments suitable for __new__()"""
    # Explicitly saved constructor arguments always win.
    if has_tag(obj, tags.NEWARGSEX):
        raise ValueError("__newargs_ex__ returns both args and kwargs")
    for arg_tag in (tags.NEWARGS, tags.INITARGS):
        if has_tag(obj, arg_tag):
            return obj[arg_tag]

    # Fall back to namedtuple-style detection: a py/seq alongside py/object
    # whose element count matches the type's _fields.
    try:
        seq_list = obj[tags.SEQ]
        obj_dict = obj[tags.OBJECT]
    except KeyError:
        return []
    typeref = util.loadclass(obj_dict, classes=classes)
    if (
        typeref
        and hasattr(typeref, "_fields")
        and len(typeref._fields) == len(seq_list)
    ):
        return seq_list
    return []

246 

247 

class _trivialclassic:
    """
    A trivial class that can be instantiated with no args
    """


def make_blank_classic(cls: Type[Any]) -> Any:
    """
    Implement the mandated strategy for dealing with classic classes
    which cannot be instantiated without __getinitargs__ because they
    take parameters
    """
    # Build an empty shell and then rebrand it as the requested class,
    # bypassing cls.__init__ entirely.
    blank = _trivialclassic()
    blank.__class__ = cls
    return blank

263 

264 

def loadrepr(reprstr: str) -> Any:
    """Returns an instance of the object from the object's repr() string.
    It involves the dynamic specification of code.

    .. warning::

        This function is unsafe and uses `eval()`.

    >>> obj = loadrepr('datetime/datetime.datetime.now()')
    >>> obj.__class__.__name__
    'datetime'

    """
    module, evalstr = reprstr.split("/")
    mylocals = locals()
    # Bind the top-level package name so the eval expression can resolve it.
    localname = module.split(".", 1)[0] if "." in module else module
    mylocals[localname] = __import__(module)
    return eval(evalstr, mylocals)

285 

286 

287def _loadmodule(module_str: str) -> Optional[Any]: 

288 """Returns a reference to a module. 

289 

290 >>> fn = _loadmodule('datetime/datetime.datetime.fromtimestamp') 

291 >>> fn.__name__ 

292 'fromtimestamp' 

293 

294 """ 

295 module, identifier = module_str.split("/") 

296 try: 

297 result = __import__(module) 

298 except ImportError: 

299 return None 

300 identifier_parts = identifier.split(".") 

301 first_identifier = identifier_parts[0] 

302 if first_identifier != module and not module.startswith(f"{first_identifier}."): 

303 return None 

304 for name in identifier_parts[1:]: 

305 try: 

306 result = getattr(result, name) 

307 except AttributeError: 

308 return None 

309 return result 

310 

311 

def has_tag_dict(obj: Any, tag: str) -> bool:
    """Return True when the dict `obj` contains the key `tag`.

    Unlike :func:`has_tag`, the caller must already know that `obj` is a
    dict (or another container supporting ``in``); no type check is
    performed, so passing a non-container raises ``TypeError``.

    >>> obj = {'test': 1}
    >>> has_tag_dict(obj, 'test')
    True
    >>> has_tag_dict(obj, 'fail')
    False

    """
    return tag in obj

327 

328 

329def _passthrough(value: Any) -> Any: 

330 """A function that returns its input as-is""" 

331 return value 

332 

333 

class Unpickler:
    """Converts the jsonpickle intermediate representation (tagged dicts and
    lists produced by the Pickler) back into live Python objects.

    Instances hold per-restore state (object reference tables and pending
    proxies); `reset()` clears that state between top-level `restore()`
    calls.
    """

    def __init__(
        self,
        backend: Optional[JSONBackend] = None,
        keys: bool = False,
        safe: bool = True,
        v1_decode: bool = False,
        on_missing: MissingHandler = "ignore",
        handle_readonly: bool = False,
    ) -> None:
        # See `decode()` for the meaning of these flags; they mirror its
        # keyword arguments.
        self.backend = backend or json
        self.keys = keys
        self.safe = safe
        self.v1_decode = v1_decode
        self.on_missing = on_missing
        self.handle_readonly = handle_readonly

        # Initialize the per-restore reference tables.
        self.reset()

    def reset(self) -> None:
        """Resets the object's internal state."""
        # Map reference names to object instances
        self._namedict = {}

        # The stack of names traversed for child objects
        self._namestack = []

        # Map of objects to their index in the _objs list
        self._obj_to_idx = {}
        self._objs = []
        self._proxies = []

        # Extra local classes not accessible globally
        self._classes = {}

    def _swap_proxies(self) -> None:
        """Replace proxies with their corresponding instances"""
        # Each entry records the container, the attribute/index, the proxy,
        # and the setter (_obj_setattr or _obj_setvalue) to apply.
        for obj, attr, proxy, method in self._proxies:
            method(obj, attr, proxy)
        self._proxies = []

    def _restore(
        self, obj: Any, _passthrough: Callable[[Any], Any] = _passthrough
    ) -> Any:
        """Dispatch `obj` to the appropriate tag-specific restore function."""
        # if obj isn't in these types, neither it nor nothing in it can have a tag
        # don't change the tuple of types to a set, it won't work with isinstance
        if not isinstance(obj, (str, list, dict, set, tuple)):
            restore = _passthrough
        else:
            restore = self._restore_tags(obj)
        return restore(obj)

    def restore(
        self, obj: Any, reset: bool = True, classes: Optional[ClassesType] = None
    ) -> Any:
        """Restores a flattened object to its original python state.

        Simply returns any of the basic builtin types

        >>> u = Unpickler()
        >>> u.restore('hello world') == 'hello world'
        True
        >>> u.restore({'key': 'value'}) == {'key': 'value'}
        True

        """
        if reset:
            self.reset()
        if classes:
            self.register_classes(classes)
        value = self._restore(obj)
        if reset:
            # Only the top-level call performs the final proxy sweep.
            self._swap_proxies()
        return value

    def register_classes(self, classes: ClassesType) -> None:
        """Register one or more classes

        :param classes: sequence of classes or a single class to register

        """
        if isinstance(classes, (list, tuple, set)):
            # Register each member individually.
            for cls in classes:
                self.register_classes(cls)
        elif isinstance(classes, dict):
            # Keys may be either class names or class objects; normalize
            # class objects to their importable names.
            self._classes.update(
                (
                    cls if isinstance(cls, str) else util.importable_name(cls),
                    handler,
                )
                for cls, handler in classes.items()
            )
        else:
            self._classes[util.importable_name(classes)] = classes  # type: ignore[arg-type]

    def _restore_base64(self, obj: Dict[str, Any]) -> bytes:
        """Decode a py/b64-tagged payload; empty bytes on malformed input."""
        try:
            return util.b64decode(obj[tags.B64].encode("utf-8"))
        except (AttributeError, UnicodeEncodeError):
            return b""

    def _restore_base85(self, obj: Dict[str, Any]) -> bytes:
        """Decode a py/b85-tagged payload; empty bytes on malformed input."""
        try:
            return util.b85decode(obj[tags.B85].encode("utf-8"))
        except (AttributeError, UnicodeEncodeError):
            return b""

    def _refname(self) -> str:
        """Calculates the name of the current location in the JSON stack.

        This is called as jsonpickle traverses the object structure to
        create references to previously-traversed objects. This allows
        cyclical data structures such as doubly-linked lists.
        jsonpickle ensures that duplicate python references to the same
        object results in only a single JSON object definition and
        special reference tags to represent each reference.

        >>> u = Unpickler()
        >>> u._namestack = []
        >>> u._refname() == '/'
        True
        >>> u._namestack = ['a']
        >>> u._refname() == '/a'
        True
        >>> u._namestack = ['a', 'b']
        >>> u._refname() == '/a/b'
        True

        """
        return "/" + "/".join(self._namestack)

    def _mkref(self, obj: Any) -> Any:
        """Register `obj` in the reference tables and return it unchanged.

        Assigns the object the next py/id index unless it was already
        registered.
        """
        obj_id = id(obj)
        try:
            _ = self._obj_to_idx[obj_id]
        except KeyError:
            self._obj_to_idx[obj_id] = len(self._objs)
            self._objs.append(obj)
            # Backwards compatibility: old versions of jsonpickle
            # produced "py/ref" references.
            self._namedict[self._refname()] = obj
        return obj

    def _restore_list(self, obj: List[Any]) -> List[Any]:
        """Restore a JSON list, registering it before restoring children so
        that the children may reference it cyclically."""
        parent = []
        self._mkref(parent)
        children = [self._restore(v) for v in obj]
        parent.extend(children)
        method = _obj_setvalue
        # Record every element that is still a proxy for the final sweep.
        proxies = [
            (parent, idx, value, method)
            for idx, value in enumerate(parent)
            if isinstance(value, _Proxy)
        ]
        self._proxies.extend(proxies)
        return parent

    def _restore_iterator(self, obj: Dict[str, Any]) -> Iterator[Any]:
        """Restore a py/iterator payload as an iterator; empty on bad input."""
        try:
            return iter(self._restore_list(obj[tags.ITERATOR]))
        except TypeError:
            return iter([])

    def _swapref(self, proxy: _Proxy, instance: Any) -> None:
        """Replace `proxy` with `instance` in all reference tables so later
        py/id references resolve to the real object."""
        proxy_id = id(proxy)
        instance_id = id(instance)

        instance_index = self._obj_to_idx[proxy_id]
        self._obj_to_idx[instance_id] = instance_index
        del self._obj_to_idx[proxy_id]

        self._objs[instance_index] = instance
        self._namedict[self._refname()] = instance

    def _restore_reduce(self, obj: Dict[str, Any]) -> Any:
        """
        Supports restoring with all elements of __reduce__ as per pep 307.
        Assumes that iterator items (the last two) are represented as lists
        as per pickler implementation.
        """
        # Plant a proxy first so the reduce payload may self-reference.
        proxy = _Proxy()
        self._mkref(proxy)
        try:
            reduce_val = list(map(self._restore, obj[tags.REDUCE]))
        except TypeError:
            result = []
            proxy.reset(result)
            self._swapref(proxy, result)
            return result
        # Pad to the full 5-tuple: (callable, args, state, listitems, dictitems).
        if len(reduce_val) < 5:
            reduce_val.extend([None] * (5 - len(reduce_val)))
        f, args, state, listitems, dictitems = reduce_val

        if f == tags.NEWOBJ or getattr(f, "__name__", "") == "__newobj__":
            # mandated special case
            cls = args[0]
            if not isinstance(cls, type):
                cls = self._restore(cls)
            stage1 = cls.__new__(cls, *args[1:])
        else:
            if not callable(f):
                # The callable could not be restored; fail gracefully.
                result = []
                proxy.reset(result)
                self._swapref(proxy, result)
                return result
            try:
                stage1 = f(*args)
            except TypeError:
                # this happens when there are missing kwargs and args don't match
                # so we bypass __init__ since the state dict will set all
                # attributes immediately afterwards
                stage1 = f.__new__(f, *args)

        if state:
            try:
                stage1.__setstate__(state)
            except AttributeError:
                # it's fine - we'll try the prescribed default methods
                try:
                    # we can't do a straight update here because we
                    # need object identity of the state dict to be
                    # preserved so that _swap_proxies works out
                    for k, v in stage1.__dict__.items():
                        state.setdefault(k, v)
                    stage1.__dict__ = state
                except AttributeError:
                    # next prescribed default
                    try:
                        for k, v in state.items():
                            setattr(stage1, k, v)
                    except Exception:
                        # Final fallback: a (dict_state, slots_state) pair.
                        dict_state, slots_state = state
                        if dict_state:
                            stage1.__dict__.update(dict_state)
                        if slots_state:
                            for k, v in slots_state.items():
                                setattr(stage1, k, v)

        if listitems:
            # should be lists if not None
            try:
                stage1.extend(listitems)
            except AttributeError:
                for x in listitems:
                    stage1.append(x)

        if dictitems:
            for k, v in dictitems:
                stage1.__setitem__(k, v)

        proxy.reset(stage1)
        self._swapref(proxy, stage1)
        return stage1

    def _restore_id(self, obj: Dict[str, Any]) -> Any:
        """Resolve a py/id back-reference to a previously restored object.

        Returns an _IDProxy for forward references (index not yet filled in)
        and None when the id value is unusable.
        """
        try:
            idx = obj[tags.ID]
            return self._objs[idx]
        except IndexError:
            # Forward reference: resolve lazily once the object exists.
            return _IDProxy(self._objs, idx)
        except TypeError:
            return None

    def _restore_type(self, obj: Dict[str, Any]) -> Any:
        """Resolve a py/type tag into the class object itself; the raw dict
        is returned when the class cannot be loaded."""
        typeref = util.loadclass(obj[tags.TYPE], classes=self._classes)
        if typeref is None:
            return obj
        return typeref

    def _restore_module(self, obj: Dict[str, Any]) -> Any:
        """Resolve a py/mod tag into the referenced module/attribute."""
        new_obj = _loadmodule(obj[tags.MODULE])
        return self._mkref(new_obj)

    def _restore_repr_safe(self, obj: Dict[str, Any]) -> Any:
        """Safe (no eval) handling of py/repr: import-based lookup only."""
        new_obj = _loadmodule(obj[tags.REPR])
        return self._mkref(new_obj)

    def _restore_repr(self, obj: Dict[str, Any]) -> Any:
        """Unsafe handling of py/repr via eval(); only used when safe=False."""
        obj = loadrepr(obj[tags.REPR])
        return self._mkref(obj)

    def _loadfactory(self, obj: Dict[str, Any]) -> Optional[Any]:
        """Pop and restore a defaultdict-style factory from `obj`, if any.

        Checks the tagged key first, then the plain 'default_factory' key.
        """
        default_factory = None
        for key in (tags.DEFAULT_FACTORY, "default_factory"):
            try:
                default_factory = obj.pop(key)
                break
            except KeyError:
                continue
        if default_factory is None:
            return None
        return self._restore(default_factory)

    def _process_missing(self, class_name: str) -> None:
        """Apply the configured on_missing policy for an unknown class."""
        # most common case comes first
        if self.on_missing == "ignore":
            pass
        elif self.on_missing == "warn":
            warnings.warn("Unpickler._restore_object could not find %s!" % class_name)
        elif self.on_missing == "error":
            raise errors.ClassNotFoundError(
                "Unpickler.restore_object could not find %s!" % class_name  # type: ignore[arg-type]
            )
        elif util._is_function(self.on_missing):
            self.on_missing(class_name)  # type: ignore[operator]

    def _restore_pickled_key(self, key: str) -> Any:
        """Restore a possibly pickled key"""
        if _is_json_key(key):
            # The key payload follows the JSON_KEY prefix; decode it with
            # this unpickler as the context so references are shared.
            key = decode(
                key[len(tags.JSON_KEY) :],
                backend=self.backend,
                context=self,
                keys=True,
                reset=False,
            )
        return key

    def _restore_key_fn(
        self, _passthrough: Callable[[Any], Any] = _passthrough
    ) -> Callable[[Any], Any]:
        """Return a callable that restores keys

        This function is responsible for restoring non-string keys
        when we are decoding with `keys=True`.

        """
        # This function is called before entering a tight loop
        # where the returned function will be called.
        # We return a specific function after checking self.keys
        # instead of doing so in the body of the function to
        # avoid conditional branching inside a tight loop.
        if self.keys:
            restore_key = self._restore_pickled_key
        else:
            restore_key = _passthrough  # type: ignore[assignment]
        return restore_key

    def _restore_from_dict(
        self,
        obj: Dict[str, Any],
        instance: Any,
        ignorereserved: bool = True,
        restore_dict_items: bool = True,
    ) -> Any:
        """Populate `instance` with the key/value pairs from `obj`.

        :param ignorereserved: skip jsonpickle's reserved py/* tag keys.
        :param restore_dict_items: when True, keys and values are themselves
            run through the restore machinery before assignment.
        """
        restore_key = self._restore_key_fn()
        method = _obj_setattr
        deferred = {}

        for k, v in util.items(obj):
            # ignore the reserved attribute
            if ignorereserved and k in tags.RESERVED:
                continue
            if isinstance(k, (int, float)):
                str_k = k.__str__()
            else:
                str_k = k
            self._namestack.append(str_k)
            if restore_dict_items:
                k = restore_key(k)
                # step into the namespace
                value = self._restore(v)
            else:
                value = v
            if util._is_noncomplex(instance) or util._is_dictionary_subclass(instance):
                try:
                    if k == "__dict__":
                        setattr(instance, k, value)
                    else:
                        instance[k] = value
                except TypeError:
                    # Immutable object, must be constructed in one shot
                    if k != "__dict__":
                        deferred[k] = value
                        self._namestack.pop()
                        continue
            else:
                if not k.startswith("__"):
                    try:
                        setattr(instance, k, value)
                    except KeyError:
                        # certain numpy objects require us to prepend a _ to the var
                        # this should go in the np handler but I think this could be
                        # useful for other code
                        setattr(instance, f"_{k}", value)
                    except dataclasses.FrozenInstanceError:
                        # issue #240
                        # i think this is the only way to set frozen dataclass attrs
                        object.__setattr__(instance, k, value)
                    except AttributeError as e:
                        # some objects raise this for read-only attributes (#422) (#478)
                        if (
                            hasattr(instance, "__slots__")
                            and not len(instance.__slots__)
                            # we have to handle this separately because of +483
                            and issubclass(instance.__class__, (int, str))
                            and self.handle_readonly
                        ):
                            continue
                        raise e
                else:
                    # Dunder attribute: apply class-name mangling on assignment.
                    setattr(instance, f"_{instance.__class__.__name__}{k}", value)

            # This instance has an instance variable named `k` that is
            # currently a proxy and must be replaced
            if isinstance(value, _Proxy):
                self._proxies.append((instance, k, value, method))

            # step out
            self._namestack.pop()

        if deferred:
            # SQLAlchemy Immutable mappings must be constructed in one shot
            instance = instance.__class__(deferred)

        return instance

    def _restore_state(self, obj: Dict[str, Any], instance: Any) -> Any:
        """Apply a py/state payload to `instance`, honoring __setstate__ and
        the pickle-protocol default handling of dict and slots state."""
        state = self._restore(obj[tags.STATE])
        # A (dict_state, slots_state) pair signals slots-style state.
        has_slots = (
            isinstance(state, tuple) and len(state) == 2 and isinstance(state[1], dict)
        )
        has_slots_and_dict = has_slots and isinstance(state[0], dict)
        if hasattr(instance, "__setstate__"):
            instance.__setstate__(state)
        elif isinstance(state, dict):
            # implements described default handling
            # of state for object with instance dict
            # and no slots
            instance = self._restore_from_dict(
                state, instance, ignorereserved=False, restore_dict_items=False
            )
        elif has_slots:
            instance = self._restore_from_dict(
                state[1], instance, ignorereserved=False, restore_dict_items=False
            )
            if has_slots_and_dict:
                instance = self._restore_from_dict(
                    state[0], instance, ignorereserved=False, restore_dict_items=False
                )
        elif not hasattr(instance, "__getnewargs__") and not hasattr(
            instance, "__getnewargs_ex__"
        ):
            # __setstate__ is not implemented so that means that the best
            # we can do is return the result of __getstate__() rather than
            # return an empty shell of an object.
            # However, if there were newargs, it's not an empty shell
            instance = state
        return instance

    def _restore_object_instance_variables(
        self, obj: Dict[str, Any], instance: Any
    ) -> Any:
        """Fill in attributes, sequence items, and py/state for `instance`."""
        instance = self._restore_from_dict(obj, instance)

        # Handle list and set subclasses
        if has_tag(obj, tags.SEQ):
            if hasattr(instance, "append"):
                for v in obj[tags.SEQ]:
                    instance.append(self._restore(v))
            elif hasattr(instance, "add"):
                for v in obj[tags.SEQ]:
                    instance.add(self._restore(v))

        if has_tag(obj, tags.STATE):
            instance = self._restore_state(obj, instance)

        return instance

    def _restore_object_instance(
        self, obj: Dict[str, Any], cls: Type[Any], class_name: str = ""
    ) -> Any:
        """Instantiate `cls` from the tagged dict `obj` and populate it."""
        # This is a placeholder proxy object which allows child objects to
        # reference the parent object before it has been instantiated.
        proxy = _Proxy()
        self._mkref(proxy)

        # An object can install itself as its own factory, so load the factory
        # after the instance is available for referencing.
        factory = self._loadfactory(obj)

        if has_tag(obj, tags.NEWARGSEX):
            args, kwargs = obj[tags.NEWARGSEX]
        else:
            args = getargs(obj, classes=self._classes)
            kwargs = {}
        if args:
            args = self._restore(args)
        if kwargs:
            kwargs = self._restore(kwargs)

        is_oldstyle = not (isinstance(cls, type) or getattr(cls, "__meta__", None))
        try:
            if not is_oldstyle and hasattr(cls, "__new__"):
                # new style classes
                if factory:
                    instance = cls.__new__(cls, factory, *args, **kwargs)
                    instance.default_factory = factory
                else:
                    instance = cls.__new__(cls, *args, **kwargs)
            else:
                instance = object.__new__(cls)
        except TypeError:  # old-style classes
            is_oldstyle = True

        if is_oldstyle:
            try:
                instance = cls(*args)
            except TypeError:  # fail gracefully
                try:
                    instance = make_blank_classic(cls)
                except Exception:  # fail gracefully
                    self._process_missing(class_name)
                    return self._mkref(obj)

        proxy.reset(instance)
        self._swapref(proxy, instance)

        if isinstance(instance, tuple):
            # Tuples are immutable; no further attribute population possible.
            return instance

        instance = self._restore_object_instance_variables(obj, instance)

        if _safe_hasattr(instance, "default_factory") and isinstance(
            instance.default_factory, _Proxy
        ):
            instance.default_factory = instance.default_factory.get()

        return instance

    def _restore_object(self, obj: Dict[str, Any]) -> Any:
        """Restore a py/object tag, preferring custom handlers when present."""
        class_name = obj[tags.OBJECT]
        cls = util.loadclass(class_name, classes=self._classes)
        handler = handlers.get(cls, handlers.get(class_name))  # type: ignore[arg-type]
        if handler is not None:  # custom handler
            proxy = _Proxy()
            self._mkref(proxy)
            instance = handler(self).restore(obj)
            proxy.reset(instance)
            self._swapref(proxy, instance)
            return instance

        if cls is None:
            # Unknown class: apply the on_missing policy and keep the raw dict.
            self._process_missing(class_name)
            return self._mkref(obj)

        return self._restore_object_instance(obj, cls, class_name)

    def _restore_function(self, obj: Dict[str, Any]) -> Any:
        """Resolve a py/function tag to the referenced function object."""
        return util.loadclass(obj[tags.FUNCTION], classes=self._classes)

    def _restore_set(self, obj: Dict[str, Any]) -> Set[Any]:
        """Restore a py/set payload; empty set on malformed input."""
        try:
            return {self._restore(v) for v in obj[tags.SET]}
        except TypeError:
            return set()

    def _restore_dict(self, obj: Dict[str, Any]) -> Dict[str, Any]:
        """Restore a plain JSON dict, optionally decoding pickled keys."""
        data = {}
        # v1 payloads did not preserve dict identity, so skip registration.
        if not self.v1_decode:
            self._mkref(data)

        # If we are decoding dicts that can have non-string keys then we
        # need to do a two-phase decode where the non-string keys are
        # processed last. This ensures a deterministic order when
        # assigning object IDs for references.
        if self.keys:
            # Phase 1: regular non-special keys.
            for k, v in util.items(obj):
                if _is_json_key(k):
                    continue
                if isinstance(k, (int, float)):
                    str_k = k.__str__()
                else:
                    str_k = k
                self._namestack.append(str_k)
                data[k] = self._restore(v)

                self._namestack.pop()

            # Phase 2: object keys only.
            for k, v in util.items(obj):
                if not _is_json_key(k):
                    continue
                self._namestack.append(k)

                k = self._restore_pickled_key(k)
                data[k] = result = self._restore(v)
                # k is currently a proxy and must be replaced
                if isinstance(result, _Proxy):
                    self._proxies.append((data, k, result, _obj_setvalue))

                self._namestack.pop()
        else:
            # No special keys, thus we don't need to restore the keys either.
            for k, v in util.items(obj):
                if isinstance(k, (int, float)):
                    str_k = k.__str__()
                else:
                    str_k = k
                self._namestack.append(str_k)
                data[k] = result = self._restore(v)
                if isinstance(result, _Proxy):
                    self._proxies.append((data, k, result, _obj_setvalue))
                self._namestack.pop()
        return data

    def _restore_tuple(self, obj: Dict[str, Any]) -> Tuple[Any, ...]:
        """Restore a py/tuple payload; empty tuple on malformed input."""
        try:
            return tuple(self._restore(v) for v in obj[tags.TUPLE])
        except TypeError:
            return ()

    def _restore_tags(
        self, obj: Any, _passthrough: Callable[[Any], Any] = _passthrough
    ) -> Callable[[Any], Any]:
        """Return the restoration function for the specified object"""
        try:
            # Fast path: non-list/dict containers (str, set, tuple) that do
            # not carry the reserved tags need no restoration at all.
            if not tags.RESERVED <= set(obj) and type(obj) not in (list, dict):
                return _passthrough
        except TypeError:
            pass
        if type(obj) is dict:
            # Dispatch on the first recognized py/* tag present in the dict.
            if tags.TUPLE in obj:
                restore = self._restore_tuple
            elif tags.SET in obj:
                restore = self._restore_set  # type: ignore[assignment]
            elif tags.B64 in obj:
                restore = self._restore_base64  # type: ignore[assignment]
            elif tags.B85 in obj:
                restore = self._restore_base85  # type: ignore[assignment]
            elif tags.ID in obj:
                restore = self._restore_id
            elif tags.ITERATOR in obj:
                restore = self._restore_iterator  # type: ignore[assignment]
            elif tags.OBJECT in obj:
                restore = self._restore_object
            elif tags.TYPE in obj:
                restore = self._restore_type
            elif tags.REDUCE in obj:
                restore = self._restore_reduce
            elif tags.FUNCTION in obj:
                restore = self._restore_function
            elif tags.MODULE in obj:
                restore = self._restore_module
            elif tags.REPR in obj:
                # safe=True avoids eval() and uses import-based lookup instead.
                if self.safe:
                    restore = self._restore_repr_safe
                else:
                    restore = self._restore_repr
            else:
                restore = self._restore_dict  # type: ignore[assignment]
        elif type(obj) is list:
            restore = self._restore_list  # type: ignore[assignment]
        else:
            restore = _passthrough  # type: ignore[assignment]
        return restore