Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/jsonpickle/unpickler.py: 69%


499 statements  

# Copyright (C) 2008 John Paulett (john -at- paulett.org)
# Copyright (C) 2009-2024 David Aguilar (davvid -at- gmail.com)
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import dataclasses
import warnings
from typing import (
    Any,
    Callable,
    Dict,
    Iterator,
    List,
    Optional,
    Sequence,
    Set,
    Tuple,
    Type,
    Union,
)

from . import errors, handlers, tags, util
from .backend import JSONBackend, json

# class names to class objects (or sequence of classes)
ClassesType = Optional[Union[Type[Any], Dict[str, Type[Any]], Sequence[Type[Any]]]]
# handler for missing classes: either a policy name or a callback
MissingHandler = Union[str, Callable[[str], Any]]


def decode(
    string: str,
    backend: Optional[JSONBackend] = None,
    # we get a lot of errors when typing with TypeVar
    context: Optional["Unpickler"] = None,
    keys: bool = False,
    reset: bool = True,
    safe: bool = True,
    classes: Optional[ClassesType] = None,
    v1_decode: bool = False,
    on_missing: MissingHandler = "ignore",
    handle_readonly: bool = False,
) -> Any:
    """Convert a JSON string into a Python object.

    :param backend: If set to an instance of jsonpickle.backend.JSONBackend, jsonpickle
        will use that backend for deserialization.

    :param context: Supply a pre-built Pickler or Unpickler object to the
        `jsonpickle.encode` and `jsonpickle.decode` machinery instead
        of creating a new instance. The `context` represents the currently
        active Pickler and Unpickler objects when custom handlers are
        invoked by jsonpickle.

    :param keys: If set to True then jsonpickle will decode non-string dictionary keys
        into python objects via the jsonpickle protocol.

    :param reset: Custom pickle handlers that use the `Pickler.flatten` method or
        `jsonpickle.encode` function must call `encode` with `reset=False`
        in order to retain object references during pickling.
        This flag is not typically used outside of a custom handler or
        `__getstate__` implementation.

    :param safe: If set to ``False``, use of ``eval()`` for backwards-compatible (pre-0.7.0)
        deserialization of repr-serialized objects is enabled. Defaults to ``True``.
        The default value was ``False`` in jsonpickle v3 and changed to ``True`` in jsonpickle v4.

        .. warning::

            ``eval()`` is used when set to ``False`` and is not secure against
            malicious inputs. You should avoid setting ``safe=False``.

    :param classes: If set to a single class, or a sequence (list, set, tuple) of
        classes, then the classes will be made available when constructing objects.
        If set to a dictionary of class names to class objects, the class object
        will be provided to jsonpickle to deserialize the class name into.
        This can be used to give jsonpickle access to local classes that are not
        available through the global module import scope, and the dict method can
        be used to deserialize encoded objects into a new class.

    :param v1_decode: If set to True it enables you to decode objects serialized in
        jsonpickle v1. Please do not attempt to re-encode the objects in the v1 format!
        Version 2's format fixes issue #255, and allows dictionary identity to be
        preserved through an encode/decode cycle.

    :param on_missing: If set to 'error', it will raise an error if the class it's
        decoding is not found. If set to 'warn', it will warn you in said case.
        If set to a non-awaitable function, it will call said callback function
        with the class name (a string) as the only parameter. Strings passed to
        `on_missing` are lowercased automatically.

    :param handle_readonly: If set to True, the Unpickler will handle objects encoded
        with 'handle_readonly' properly. Do not set this flag for objects not encoded
        with 'handle_readonly' set to True.


    >>> decode('"my string"') == 'my string'
    True
    >>> decode('36')
    36
    """

    if isinstance(on_missing, str):
        on_missing = on_missing.lower()
    elif not util._is_function(on_missing):
        warnings.warn(
            "Unpickler.on_missing must be a string or a function! It will be ignored!"
        )

    backend = backend or json
    is_ephemeral_context = context is None
    context = context or Unpickler(
        keys=keys,
        backend=backend,
        safe=safe,
        v1_decode=v1_decode,
        on_missing=on_missing,
        handle_readonly=handle_readonly,
    )
    data = backend.decode(string)
    result = context.restore(data, reset=reset, classes=classes)
    if is_ephemeral_context:
        # Avoid holding onto references to external objects, which can
        # prevent garbage collection from occurring.
        context.reset()
    return result

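
# --- Illustrative sketch (editor's addition, not part of unpickler.py) ---
# A minimal demonstration of the `classes` and `on_missing` parameters
# documented above. `Point`, `RenamedPoint`, and `_demo_decode_options()`
# are hypothetical names used only for this example; only the public
# jsonpickle API (encode/decode) and jsonpickle.util.importable_name are
# assumed.
def _demo_decode_options() -> None:
    import jsonpickle
    from jsonpickle.util import importable_name

    class Point:
        def __init__(self, x: int, y: int) -> None:
            self.x, self.y = x, y

    encoded = jsonpickle.encode(Point(1, 2))

    # A single class (or a sequence of classes) makes locally defined
    # classes resolvable while decoding.
    point = jsonpickle.decode(encoded, classes=Point)
    assert (point.x, point.y) == (1, 2)

    # The dict form maps encoded class names onto replacement classes,
    # e.g. when a class was renamed after the data was written.
    class RenamedPoint:
        def __init__(self, x: int = 0, y: int = 0) -> None:
            self.x, self.y = x, y

    remapped = jsonpickle.decode(
        encoded, classes={importable_name(Point): RenamedPoint}
    )
    assert isinstance(remapped, RenamedPoint) and remapped.x == 1

    # `on_missing` may be 'ignore' (default), 'warn', 'error', or a callback
    # that receives the missing class name as a string.
    jsonpickle.decode(encoded, on_missing=lambda name: print("missing:", name))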


def _safe_hasattr(obj: Any, attr: str) -> bool:
    """Workaround unreliable hasattr() availability on sqlalchemy objects"""
    try:
        object.__getattribute__(obj, attr)
        return True
    except AttributeError:
        return False


def _is_json_key(key: Any) -> bool:
    """Is this key a special object that has been encoded to JSON?"""
    return isinstance(key, str) and key.startswith(tags.JSON_KEY)


class _Proxy:
    """Proxies are dummy objects that are later replaced by real instances

    The `restore()` function has to solve a tricky problem when restoring
    objects with cyclical references -- the parent instance does not yet
    exist.

    The problem is that `__getnewargs__()`, `__getstate__()`, custom handlers,
    and cyclical object graphs are allowed to reference the yet-to-be-created
    object via the referencing machinery.

    In other words, objects are allowed to depend on themselves for
    construction!

    We solve this problem by placing dummy Proxy objects into the referencing
    machinery so that we can construct the child objects before constructing
    the parent. Objects are initially created with Proxy attribute values
    instead of real references.

    We collect all objects that contain references to proxies and run
    a final sweep over them to swap in the real instance. This is done
    at the very end of the top-level `restore()`.

    The `instance` attribute below is replaced with the real instance
    after `__new__()` has been used to construct the object and is used
    when swapping proxies with real instances.

    """

    def __init__(self) -> None:
        self.instance = None

    def get(self) -> Any:
        return self.instance

    def reset(self, instance: Any) -> None:
        self.instance = instance

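
# --- Illustrative sketch (editor's addition, not part of unpickler.py) ---
# Why the proxy machinery above exists: an object graph may reference
# itself, so children have to be restorable before their parent fully
# exists. `Node` and `_demo_cyclic_restore()` are hypothetical names used
# only for this example.
def _demo_cyclic_restore() -> None:
    import jsonpickle

    class Node:
        def __init__(self) -> None:
            self.parent = None
            self.children = []

    root = Node()
    child = Node()
    child.parent = root  # cycle: root -> child -> root
    root.children.append(child)

    restored = jsonpickle.decode(jsonpickle.encode(root), classes=Node)
    # After the final proxy sweep the cycle points at real instances,
    # not _Proxy placeholders.
    assert restored.children[0].parent is restored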


class _IDProxy(_Proxy):
    def __init__(self, objs: List[Any], index: int) -> None:
        self._index = index
        self._objs = objs

    def get(self) -> Any:
        try:
            return self._objs[self._index]
        except IndexError:
            return None


def _obj_setattr(obj: Any, attr: str, proxy: _Proxy) -> None:
    """Use setattr to update a proxy entry"""
    setattr(obj, attr, proxy.get())


def _obj_setvalue(obj: Any, idx: Any, proxy: _Proxy) -> None:
    """Use obj[key] assignments to update a proxy entry"""
    obj[idx] = proxy.get()


def has_tag(obj: Any, tag: str) -> bool:
    """Helper function that tests to see if the obj is a dictionary
    and contains a particular key/tag.

    >>> obj = {'test': 1}
    >>> has_tag(obj, 'test')
    True
    >>> has_tag(obj, 'fail')
    False

    >>> has_tag(42, 'fail')
    False

    """
    return type(obj) is dict and tag in obj


def getargs(obj: Dict[str, Any], classes: Optional[Dict[str, Type[Any]]] = None) -> Any:
    """Return arguments suitable for __new__()"""
    # Let saved newargs take precedence over everything
    if has_tag(obj, tags.NEWARGSEX):
        raise ValueError("__newargs_ex__ returns both args and kwargs")

    if has_tag(obj, tags.NEWARGS):
        return obj[tags.NEWARGS]

    if has_tag(obj, tags.INITARGS):
        return obj[tags.INITARGS]

    try:
        seq_list = obj[tags.SEQ]
        obj_dict = obj[tags.OBJECT]
    except KeyError:
        return []
    typeref = util.loadclass(obj_dict, classes=classes)
    if not typeref:
        return []
    if hasattr(typeref, "_fields"):
        if len(typeref._fields) == len(seq_list):
            return seq_list
    return []

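
# --- Illustrative sketch (editor's addition, not part of unpickler.py) ---
# getargs() recovers the arguments that __new__() needs -- for example the
# field values of a namedtuple -- whether they were stored as py/newargs,
# py/initargs, or the legacy py/seq form checked above. `Point` is a
# hypothetical namedtuple used only for this example.
def _demo_newargs_roundtrip() -> None:
    import collections

    import jsonpickle

    Point = collections.namedtuple("Point", ["x", "y"])

    encoded = jsonpickle.encode(Point(1, 2))
    restored = jsonpickle.decode(encoded, classes=Point)
    assert restored == Point(1, 2)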


class _trivialclassic:
    """
    A trivial class that can be instantiated with no args
    """


def make_blank_classic(cls: Type[Any]) -> Any:
    """
    Implement the mandated strategy for dealing with classic classes
    which cannot be instantiated without __getinitargs__ because they
    take parameters
    """
    instance = _trivialclassic()
    instance.__class__ = cls
    return instance


def loadrepr(reprstr: str) -> Any:
    """Returns an instance of the object from the object's repr() string.
    It involves the dynamic specification of code.

    .. warning::

        This function is unsafe and uses `eval()`.

    >>> obj = loadrepr('datetime/datetime.datetime.now()')
    >>> obj.__class__.__name__
    'datetime'

    """
    module, evalstr = reprstr.split("/")
    mylocals = locals()
    localname = module
    if "." in localname:
        localname = module.split(".", 1)[0]
    mylocals[localname] = __import__(module)
    return eval(evalstr, mylocals)


def _loadmodule(module_str: str) -> Optional[Any]:
    """Returns a reference to a module.

    >>> fn = _loadmodule('datetime/datetime.datetime.fromtimestamp')
    >>> fn.__name__
    'fromtimestamp'

    """
    module, identifier = module_str.split("/")
    try:
        result = __import__(module)
    except ImportError:
        return None
    identifier_parts = identifier.split(".")
    first_identifier = identifier_parts[0]
    if first_identifier != module and not module.startswith(f"{first_identifier}."):
        return None
    for name in identifier_parts[1:]:
        try:
            result = getattr(result, name)
        except AttributeError:
            return None
    return result

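
# --- Illustrative sketch (editor's addition, not part of unpickler.py) ---
# How the `safe` flag documented in decode() interacts with legacy py/repr
# payloads: with safe=True they are resolved through _loadmodule() only,
# while safe=False re-enables loadrepr()/eval(). The payload below is
# hand-written purely for illustration.
def _demo_safe_repr() -> None:
    import datetime

    import jsonpickle

    legacy = '{"py/repr": "datetime/datetime.datetime(2020, 1, 2, 3, 4, 5)"}'

    # safe=True (the default) never calls eval(); an expression that is not
    # a plain attribute path simply fails to resolve and comes back as None.
    assert jsonpickle.decode(legacy) is None

    # safe=False restores the pre-0.7.0 eval()-based behavior.
    # Only use it on input you fully trust.
    value = jsonpickle.decode(legacy, safe=False)
    assert value == datetime.datetime(2020, 1, 2, 3, 4, 5)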


def has_tag_dict(obj: Any, tag: str) -> bool:
    """Helper function that tests whether an object, assumed to be a
    dictionary, contains a particular key/tag.

    >>> obj = {'test': 1}
    >>> has_tag_dict(obj, 'test')
    True
    >>> has_tag_dict(obj, 'fail')
    False

    """
    return tag in obj


def _passthrough(value: Any) -> Any:
    """A function that returns its input as-is"""
    return value


class Unpickler:
    def __init__(
        self,
        backend: Optional[JSONBackend] = None,
        keys: bool = False,
        safe: bool = True,
        v1_decode: bool = False,
        on_missing: MissingHandler = "ignore",
        handle_readonly: bool = False,
    ) -> None:
        self.backend = backend or json
        self.keys = keys
        self.safe = safe
        self.v1_decode = v1_decode
        self.on_missing = on_missing
        self.handle_readonly = handle_readonly

        self.reset()

    def reset(self) -> None:
        """Resets the object's internal state."""
        # Map reference names to object instances
        self._namedict = {}

        # The stack of names traversed for child objects
        self._namestack = []

        # Map of objects to their index in the _objs list
        self._obj_to_idx = {}
        self._objs = []
        self._proxies = []

        # Extra local classes not accessible globally
        self._classes = {}

    def _swap_proxies(self) -> None:
        """Replace proxies with their corresponding instances"""
        for obj, attr, proxy, method in self._proxies:
            method(obj, attr, proxy)
        self._proxies = []

    def _restore(
        self, obj: Any, _passthrough: Callable[[Any], Any] = _passthrough
    ) -> Any:
        # if obj isn't one of these types, neither it nor anything inside it can have a tag
        # don't change the tuple of types to a set, it won't work with isinstance
        if not isinstance(obj, (str, list, dict, set, tuple)):
            restore = _passthrough
        else:
            restore = self._restore_tags(obj)
        return restore(obj)

    def restore(
        self, obj: Any, reset: bool = True, classes: Optional[ClassesType] = None
    ) -> Any:
        """Restores a flattened object to its original python state.

        Simply returns any of the basic builtin types

        >>> u = Unpickler()
        >>> u.restore('hello world') == 'hello world'
        True
        >>> u.restore({'key': 'value'}) == {'key': 'value'}
        True

        """
        if reset:
            self.reset()
        if classes:
            self.register_classes(classes)
        value = self._restore(obj)
        if reset:
            self._swap_proxies()
        return value

    def register_classes(self, classes: ClassesType) -> None:
        """Register one or more classes

        :param classes: sequence of classes or a single class to register

        """
        if isinstance(classes, (list, tuple, set)):
            for cls in classes:
                self.register_classes(cls)
        elif isinstance(classes, dict):
            self._classes.update(
                (
                    cls if isinstance(cls, str) else util.importable_name(cls),
                    handler,
                )
                for cls, handler in classes.items()
            )
        else:
            self._classes[util.importable_name(classes)] = classes  # type: ignore[arg-type]


    def _restore_base64(self, obj: Dict[str, Any]) -> bytes:
        try:
            return util.b64decode(obj[tags.B64].encode("utf-8"))
        except (AttributeError, UnicodeEncodeError):
            return b""

    def _restore_base85(self, obj: Dict[str, Any]) -> bytes:
        try:
            return util.b85decode(obj[tags.B85].encode("utf-8"))
        except (AttributeError, UnicodeEncodeError):
            return b""

    def _refname(self) -> str:
        """Calculates the name of the current location in the JSON stack.

        This is called as jsonpickle traverses the object structure to
        create references to previously-traversed objects. This allows
        cyclical data structures such as doubly-linked lists.
        jsonpickle ensures that duplicate python references to the same
        object result in only a single JSON object definition and
        special reference tags to represent each reference.

        >>> u = Unpickler()
        >>> u._namestack = []
        >>> u._refname() == '/'
        True
        >>> u._namestack = ['a']
        >>> u._refname() == '/a'
        True
        >>> u._namestack = ['a', 'b']
        >>> u._refname() == '/a/b'
        True

        """
        return "/" + "/".join(self._namestack)

    def _mkref(self, obj: Any) -> Any:
        obj_id = id(obj)
        try:
            _ = self._obj_to_idx[obj_id]
        except KeyError:
            self._obj_to_idx[obj_id] = len(self._objs)
            self._objs.append(obj)
            # Backwards compatibility: old versions of jsonpickle
            # produced "py/ref" references.
            self._namedict[self._refname()] = obj
        return obj

    def _restore_list(self, obj: List[Any]) -> List[Any]:
        parent = []
        self._mkref(parent)
        children = [self._restore(v) for v in obj]
        parent.extend(children)
        method = _obj_setvalue
        proxies = [
            (parent, idx, value, method)
            for idx, value in enumerate(parent)
            if isinstance(value, _Proxy)
        ]
        self._proxies.extend(proxies)
        return parent

    def _restore_iterator(self, obj: Dict[str, Any]) -> Iterator[Any]:
        try:
            return iter(self._restore_list(obj[tags.ITERATOR]))
        except TypeError:
            return iter([])

    def _swapref(self, proxy: _Proxy, instance: Any) -> None:
        proxy_id = id(proxy)
        instance_id = id(instance)

        instance_index = self._obj_to_idx[proxy_id]
        self._obj_to_idx[instance_id] = instance_index
        del self._obj_to_idx[proxy_id]

        self._objs[instance_index] = instance
        self._namedict[self._refname()] = instance

    def _restore_reduce(self, obj: Dict[str, Any]) -> Any:
        """
        Supports restoring with all elements of __reduce__ as per PEP 307.
        Assumes that iterator items (the last two) are represented as lists
        as per the pickler implementation.
        (An illustrative round-trip sketch appears after this class.)
        """
        proxy = _Proxy()
        self._mkref(proxy)
        try:
            reduce_val = list(map(self._restore, obj[tags.REDUCE]))
        except TypeError:
            result = []
            proxy.reset(result)
            self._swapref(proxy, result)
            return result
        if len(reduce_val) < 5:
            reduce_val.extend([None] * (5 - len(reduce_val)))
        f, args, state, listitems, dictitems = reduce_val

        if f == tags.NEWOBJ or getattr(f, "__name__", "") == "__newobj__":
            # mandated special case
            cls = args[0]
            if not isinstance(cls, type):
                cls = self._restore(cls)
            stage1 = cls.__new__(cls, *args[1:])
        else:
            if not callable(f):
                result = []
                proxy.reset(result)
                self._swapref(proxy, result)
                return result
            try:
                stage1 = f(*args)
            except TypeError:
                # this happens when there are missing kwargs and args don't match, so we
                # bypass __init__ since the state dict will set all attributes immediately afterwards
                stage1 = f.__new__(f, *args)

        if state:
            try:
                stage1.__setstate__(state)
            except AttributeError:
                # it's fine - we'll try the prescribed default methods
                try:
                    # we can't do a straight update here because we
                    # need object identity of the state dict to be
                    # preserved so that _swap_proxies works out
                    for k, v in stage1.__dict__.items():
                        state.setdefault(k, v)
                    stage1.__dict__ = state
                except AttributeError:
                    # next prescribed default
                    try:
                        for k, v in state.items():
                            setattr(stage1, k, v)
                    except Exception:
                        dict_state, slots_state = state
                        if dict_state:
                            stage1.__dict__.update(dict_state)
                        if slots_state:
                            for k, v in slots_state.items():
                                setattr(stage1, k, v)

        if listitems:
            # should be lists if not None
            try:
                stage1.extend(listitems)
            except AttributeError:
                for x in listitems:
                    stage1.append(x)

        if dictitems:
            for k, v in dictitems:
                stage1.__setitem__(k, v)

        proxy.reset(stage1)
        self._swapref(proxy, stage1)
        return stage1


    def _restore_id(self, obj: Dict[str, Any]) -> Any:
        try:
            idx = obj[tags.ID]
            return self._objs[idx]
        except IndexError:
            return _IDProxy(self._objs, idx)
        except TypeError:
            return None

    def _restore_type(self, obj: Dict[str, Any]) -> Any:
        typeref = util.loadclass(obj[tags.TYPE], classes=self._classes)
        if typeref is None:
            return obj
        return typeref

    def _restore_module(self, obj: Dict[str, Any]) -> Any:
        new_obj = _loadmodule(obj[tags.MODULE])
        return self._mkref(new_obj)

    def _restore_repr_safe(self, obj: Dict[str, Any]) -> Any:
        new_obj = _loadmodule(obj[tags.REPR])
        return self._mkref(new_obj)

    def _restore_repr(self, obj: Dict[str, Any]) -> Any:
        obj = loadrepr(obj[tags.REPR])
        return self._mkref(obj)

    def _loadfactory(self, obj: Dict[str, Any]) -> Optional[Any]:
        try:
            default_factory = obj["default_factory"]
        except KeyError:
            return None
        del obj["default_factory"]
        return self._restore(default_factory)

    def _process_missing(self, class_name: str) -> None:
        # most common case comes first
        if self.on_missing == "ignore":
            pass
        elif self.on_missing == "warn":
            warnings.warn("Unpickler._restore_object could not find %s!" % class_name)
        elif self.on_missing == "error":
            raise errors.ClassNotFoundError(
                "Unpickler.restore_object could not find %s!" % class_name  # type: ignore[arg-type]
            )
        elif util._is_function(self.on_missing):
            self.on_missing(class_name)  # type: ignore[operator]

    def _restore_pickled_key(self, key: str) -> Any:
        """Restore a possibly pickled key"""
        if _is_json_key(key):
            key = decode(
                key[len(tags.JSON_KEY) :],
                backend=self.backend,
                context=self,
                keys=True,
                reset=False,
            )
        return key

    def _restore_key_fn(
        self, _passthrough: Callable[[Any], Any] = _passthrough
    ) -> Callable[[Any], Any]:
        """Return a callable that restores keys

        This function is responsible for restoring non-string keys
        when we are decoding with `keys=True`.

        """
        # This function is called before entering a tight loop
        # where the returned function will be called.
        # We return a specific function after checking self.keys
        # instead of doing so in the body of the function to
        # avoid conditional branching inside a tight loop.
        if self.keys:
            restore_key = self._restore_pickled_key
        else:
            restore_key = _passthrough  # type: ignore[assignment]
        return restore_key


    def _restore_from_dict(
        self,
        obj: Dict[str, Any],
        instance: Any,
        ignorereserved: bool = True,
        restore_dict_items: bool = True,
    ) -> Any:
        restore_key = self._restore_key_fn()
        method = _obj_setattr
        deferred = {}

        for k, v in util.items(obj):
            # ignore the reserved attribute
            if ignorereserved and k in tags.RESERVED:
                continue
            if isinstance(k, (int, float)):
                str_k = k.__str__()
            else:
                str_k = k
            self._namestack.append(str_k)
            if restore_dict_items:
                k = restore_key(k)
                # step into the namespace
                value = self._restore(v)
            else:
                value = v
            if util._is_noncomplex(instance) or util._is_dictionary_subclass(instance):
                try:
                    if k == "__dict__":
                        setattr(instance, k, value)
                    else:
                        instance[k] = value
                except TypeError:
                    # Immutable object, must be constructed in one shot
                    if k != "__dict__":
                        deferred[k] = value
                        self._namestack.pop()
                        continue
            else:
                if not k.startswith("__"):
                    try:
                        setattr(instance, k, value)
                    except KeyError:
                        # certain numpy objects require us to prepend a _ to the var
                        # this should go in the np handler but I think this could be
                        # useful for other code
                        setattr(instance, f"_{k}", value)
                    except dataclasses.FrozenInstanceError:
                        # issue #240
                        # I think this is the only way to set frozen dataclass attrs
                        object.__setattr__(instance, k, value)
                    except AttributeError as e:
                        # some objects raise this for read-only attributes (#422) (#478)
                        if (
                            hasattr(instance, "__slots__")
                            and not len(instance.__slots__)
                            # we have to handle this separately because of #483
                            and issubclass(instance.__class__, (int, str))
                            and self.handle_readonly
                        ):
                            continue
                        raise e
                else:
                    setattr(instance, f"_{instance.__class__.__name__}{k}", value)

            # This instance has an instance variable named `k` that is
            # currently a proxy and must be replaced
            if isinstance(value, _Proxy):
                self._proxies.append((instance, k, value, method))

            # step out
            self._namestack.pop()

        if deferred:
            # SQLAlchemy Immutable mappings must be constructed in one shot
            instance = instance.__class__(deferred)

        return instance

    def _restore_state(self, obj: Dict[str, Any], instance: Any) -> Any:
        state = self._restore(obj[tags.STATE])
        has_slots = (
            isinstance(state, tuple) and len(state) == 2 and isinstance(state[1], dict)
        )
        has_slots_and_dict = has_slots and isinstance(state[0], dict)
        if hasattr(instance, "__setstate__"):
            instance.__setstate__(state)
        elif isinstance(state, dict):
            # implements described default handling
            # of state for object with instance dict
            # and no slots
            instance = self._restore_from_dict(
                state, instance, ignorereserved=False, restore_dict_items=False
            )
        elif has_slots:
            instance = self._restore_from_dict(
                state[1], instance, ignorereserved=False, restore_dict_items=False
            )
            if has_slots_and_dict:
                instance = self._restore_from_dict(
                    state[0], instance, ignorereserved=False, restore_dict_items=False
                )
        elif not hasattr(instance, "__getnewargs__") and not hasattr(
            instance, "__getnewargs_ex__"
        ):
            # __setstate__ is not implemented so that means that the best
            # we can do is return the result of __getstate__() rather than
            # return an empty shell of an object.
            # However, if there were newargs, it's not an empty shell
            instance = state
        return instance


    def _restore_object_instance_variables(
        self, obj: Dict[str, Any], instance: Any
    ) -> Any:
        instance = self._restore_from_dict(obj, instance)

        # Handle list and set subclasses
        if has_tag(obj, tags.SEQ):
            if hasattr(instance, "append"):
                for v in obj[tags.SEQ]:
                    instance.append(self._restore(v))
            elif hasattr(instance, "add"):
                for v in obj[tags.SEQ]:
                    instance.add(self._restore(v))

        if has_tag(obj, tags.STATE):
            instance = self._restore_state(obj, instance)

        return instance

    def _restore_object_instance(
        self, obj: Dict[str, Any], cls: Type[Any], class_name: str = ""
    ) -> Any:
        # This is a placeholder proxy object which allows child objects to
        # reference the parent object before it has been instantiated.
        proxy = _Proxy()
        self._mkref(proxy)

        # An object can install itself as its own factory, so load the factory
        # after the instance is available for referencing.
        factory = self._loadfactory(obj)

        if has_tag(obj, tags.NEWARGSEX):
            args, kwargs = obj[tags.NEWARGSEX]
        else:
            args = getargs(obj, classes=self._classes)
            kwargs = {}
        if args:
            args = self._restore(args)
        if kwargs:
            kwargs = self._restore(kwargs)

        is_oldstyle = not (isinstance(cls, type) or getattr(cls, "__meta__", None))
        try:
            if not is_oldstyle and hasattr(cls, "__new__"):
                # new style classes
                if factory:
                    instance = cls.__new__(cls, factory, *args, **kwargs)
                    instance.default_factory = factory
                else:
                    instance = cls.__new__(cls, *args, **kwargs)
            else:
                instance = object.__new__(cls)
        except TypeError:  # old-style classes
            is_oldstyle = True

        if is_oldstyle:
            try:
                instance = cls(*args)
            except TypeError:  # fail gracefully
                try:
                    instance = make_blank_classic(cls)
                except Exception:  # fail gracefully
                    self._process_missing(class_name)
                    return self._mkref(obj)

        proxy.reset(instance)
        self._swapref(proxy, instance)

        if isinstance(instance, tuple):
            return instance

        instance = self._restore_object_instance_variables(obj, instance)

        if _safe_hasattr(instance, "default_factory") and isinstance(
            instance.default_factory, _Proxy
        ):
            instance.default_factory = instance.default_factory.get()

        return instance

    def _restore_object(self, obj: Dict[str, Any]) -> Any:
        class_name = obj[tags.OBJECT]
        cls = util.loadclass(class_name, classes=self._classes)
        handler = handlers.get(cls, handlers.get(class_name))  # type: ignore[arg-type]
        if handler is not None:  # custom handler
            proxy = _Proxy()
            self._mkref(proxy)
            instance = handler(self).restore(obj)
            proxy.reset(instance)
            self._swapref(proxy, instance)
            return instance

        if cls is None:
            self._process_missing(class_name)
            return self._mkref(obj)

        return self._restore_object_instance(obj, cls, class_name)


    def _restore_function(self, obj: Dict[str, Any]) -> Any:
        return util.loadclass(obj[tags.FUNCTION], classes=self._classes)

    def _restore_set(self, obj: Dict[str, Any]) -> Set[Any]:
        try:
            return {self._restore(v) for v in obj[tags.SET]}
        except TypeError:
            return set()

    def _restore_dict(self, obj: Dict[str, Any]) -> Dict[str, Any]:
        data = {}
        if not self.v1_decode:
            self._mkref(data)

        # If we are decoding dicts that can have non-string keys then we
        # need to do a two-phase decode where the non-string keys are
        # processed last. This ensures a deterministic order when
        # assigning object IDs for references.
        if self.keys:
            # Phase 1: regular non-special keys.
            for k, v in util.items(obj):
                if _is_json_key(k):
                    continue
                if isinstance(k, (int, float)):
                    str_k = k.__str__()
                else:
                    str_k = k
                self._namestack.append(str_k)
                data[k] = self._restore(v)

                self._namestack.pop()

            # Phase 2: object keys only.
            for k, v in util.items(obj):
                if not _is_json_key(k):
                    continue
                self._namestack.append(k)

                k = self._restore_pickled_key(k)
                data[k] = result = self._restore(v)
                # k is currently a proxy and must be replaced
                if isinstance(result, _Proxy):
                    self._proxies.append((data, k, result, _obj_setvalue))

                self._namestack.pop()
        else:
            # No special keys, thus we don't need to restore the keys either.
            for k, v in util.items(obj):
                if isinstance(k, (int, float)):
                    str_k = k.__str__()
                else:
                    str_k = k
                self._namestack.append(str_k)
                data[k] = result = self._restore(v)
                if isinstance(result, _Proxy):
                    self._proxies.append((data, k, result, _obj_setvalue))
                self._namestack.pop()
        return data


    def _restore_tuple(self, obj: Dict[str, Any]) -> Tuple[Any, ...]:
        try:
            return tuple(self._restore(v) for v in obj[tags.TUPLE])
        except TypeError:
            return ()

    def _restore_tags(
        self, obj: Any, _passthrough: Callable[[Any], Any] = _passthrough
    ) -> Callable[[Any], Any]:
        """Return the restoration function for the specified object"""
        try:
            if not tags.RESERVED <= set(obj) and type(obj) not in (list, dict):
                return _passthrough
        except TypeError:
            pass
        if type(obj) is dict:
            if tags.TUPLE in obj:
                restore = self._restore_tuple
            elif tags.SET in obj:
                restore = self._restore_set  # type: ignore[assignment]
            elif tags.B64 in obj:
                restore = self._restore_base64  # type: ignore[assignment]
            elif tags.B85 in obj:
                restore = self._restore_base85  # type: ignore[assignment]
            elif tags.ID in obj:
                restore = self._restore_id
            elif tags.ITERATOR in obj:
                restore = self._restore_iterator  # type: ignore[assignment]
            elif tags.OBJECT in obj:
                restore = self._restore_object
            elif tags.TYPE in obj:
                restore = self._restore_type
            elif tags.REDUCE in obj:
                restore = self._restore_reduce
            elif tags.FUNCTION in obj:
                restore = self._restore_function
            elif tags.MODULE in obj:
                restore = self._restore_module
            elif tags.REPR in obj:
                if self.safe:
                    restore = self._restore_repr_safe
                else:
                    restore = self._restore_repr
            else:
                restore = self._restore_dict  # type: ignore[assignment]
        elif type(obj) is list:
            restore = self._restore_list  # type: ignore[assignment]
        else:
            restore = _passthrough  # type: ignore[assignment]
        return restore
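

# --- Illustrative sketch (editor's addition, not part of unpickler.py) ---
# _restore_reduce() above consumes py/reduce payloads produced for types
# that customize pickling via __reduce__ (PEP 307), such as decimal.Decimal,
# whose payload typically carries a callable plus its arguments.
def _demo_reduce_roundtrip() -> None:
    import decimal

    import jsonpickle

    value = decimal.Decimal("3.14")
    # Round trip through the reduce protocol: the saved callable (here the
    # class itself) is re-applied to its saved arguments during decoding.
    assert jsonpickle.decode(jsonpickle.encode(value)) == value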
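

# --- Illustrative sketch (editor's addition, not part of unpickler.py) ---
# _restore_state() above hands a decoded py/state payload to __setstate__()
# when the class defines one, mirroring pickle's protocol. `Tally` is a
# hypothetical class used only for this example.
def _demo_setstate_roundtrip() -> None:
    import jsonpickle

    class Tally:
        def __init__(self) -> None:
            self.count = 0
            self.cache = {}  # derived data we do not bother serializing

        def __getstate__(self):
            return {"count": self.count}

        def __setstate__(self, state):
            self.count = state["count"]
            self.cache = {}  # rebuilt on restore

    original = Tally()
    original.count = 3
    restored = jsonpickle.decode(jsonpickle.encode(original), classes=Tally)
    assert restored.count == 3 and restored.cache == {}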
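

# --- Illustrative sketch (editor's addition, not part of unpickler.py) ---
# What _restore_object()/_process_missing() above do when a class cannot be
# found: the default policy hands back the tagged dict unchanged, while
# on_missing='error' raises ClassNotFoundError. The class path below is
# made up purely for illustration.
def _demo_missing_class() -> None:
    import jsonpickle
    from jsonpickle.errors import ClassNotFoundError

    payload = '{"py/object": "nonexistent_module.Widget", "size": 3}'

    # Default policy ('ignore'): the raw, still-tagged dict comes back.
    leftover = jsonpickle.decode(payload)
    assert leftover == {"py/object": "nonexistent_module.Widget", "size": 3}

    # Strict policy: surface the problem instead of silently degrading.
    try:
        jsonpickle.decode(payload, on_missing="error")
    except ClassNotFoundError:
        pass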
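

# --- Illustrative sketch (editor's addition, not part of unpickler.py) ---
# _restore_dict() above only re-hydrates non-string keys when keys=True,
# matching the two-phase decode described in its comments.
def _demo_nonstring_keys() -> None:
    import jsonpickle

    table = {(1, 2): "corner", 3: "edge"}
    encoded = jsonpickle.encode(table, keys=True)

    # With keys=True the tuple and int keys come back as real objects.
    assert jsonpickle.decode(encoded, keys=True) == table

    # Without keys=True the escaped key strings are left as-is.
    assert all(isinstance(k, str) for k in jsonpickle.decode(encoded))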