Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/jsonpickle/unpickler.py: 68%

512 statements  

1# Copyright (C) 2008 John Paulett (john -at- paulett.org) 

2# Copyright (C) 2009-2024 David Aguilar (davvid -at- gmail.com) 

3# All rights reserved. 

4# 

5# This software is licensed as described in the file COPYING, which 

6# you should have received as part of this distribution. 

7import dataclasses 

8import warnings 

9from typing import ( 

10 Any, 

11 Callable, 

12 Dict, 

13 Iterator, 

14 List, 

15 Optional, 

16 Sequence, 

17 Set, 

18 Tuple, 

19 Type, 

20 Union, 

21) 

22 

23from . import errors, handlers, tags, util 

24from .backend import JSONBackend, json 

25 

26# class names to class objects (or sequence of classes) 

27ClassesType = Optional[Union[Type[Any], Dict[str, Type[Any]], Sequence[Type[Any]]]] 

28# handler for missing classes: either a policy name or a callback 

29MissingHandler = Union[str, Callable[[str], Any]] 

30 

31 

32def decode( 

33 string: str, 

34 backend: Optional[JSONBackend] = None, 

35 # we get a lot of errors when typing with TypeVar 

36 context: Optional["Unpickler"] = None, 

37 keys: bool = False, 

38 reset: bool = True, 

39 safe: bool = True, 

40 classes: Optional[ClassesType] = None, 

41 v1_decode: bool = False, 

42 on_missing: MissingHandler = "ignore", 

43 handle_readonly: bool = False, 

44 handler_context: Any = None, 

45) -> Any: 

46 """Convert a JSON string into a Python object. 

47 

48 :param backend: If set to an instance of jsonpickle.backend.JSONBackend, jsonpickle 

49 will use that backend for deserialization. 

50 

51 :param context: Supply a pre-built Pickler or Unpickler object to the 

52 `jsonpickle.encode` and `jsonpickle.decode` machinery instead 

53 of creating a new instance. The `context` represents the currently 

54 active Pickler and Unpickler objects when custom handlers are 

55 invoked by jsonpickle. 

56 

57 :param keys: If set to True then jsonpickle will decode non-string dictionary keys 

58 into python objects via the jsonpickle protocol. 

59 

60 :param reset: Custom pickle handlers that use the `Pickler.flatten` method or 

61 `jsonpickle.encode` function must call `encode` with `reset=False` 

62 in order to retain object references during pickling. 

63 This flag is not typically used outside of a custom handler or 

64 `__getstate__` implementation. 

65 

66 :param safe: If set to ``False``, use of ``eval()`` for backwards-compatible (pre-0.7.0) 

67 deserialization of repr-serialized objects is enabled. Defaults to ``True``. 

68 The default value was ``False`` in jsonpickle v3 and changed to ``True`` in jsonpickle v4. 

69 

70 .. warning:: 

71 

72 ``eval()`` is used when set to ``False`` and is not secure against 

73 malicious inputs. You should avoid setting ``safe=False``. 

74 

75 :param classes: If set to a single class, or a sequence (list, set, tuple) of 

76 classes, then the classes will be made available when constructing objects. 

77 If set to a dictionary mapping class names to class objects, each encoded

78 class name is deserialized into the corresponding class object.

79 This can be used to give jsonpickle access to local classes that are not

80 available through the global module import scope, and the dictionary form can

81 be used to deserialize encoded objects into a different class. An example of using

82 this argument can be found in examples/changing_class_path.py on GitHub. 

83 

84 :param v1_decode: If set to True it enables you to decode objects serialized in 

85 jsonpickle v1. Please do not attempt to re-encode the objects in the v1 format! 

86 Version 2's format fixes issue #255, and allows dictionary identity to be 

87 preserved through an encode/decode cycle. 

88 

89 :param on_missing: If set to 'error', raise an error when a class being

90 decoded cannot be found. If set to 'warn', issue a warning instead.

91 If set to a non-awaitable callable, that callable is invoked with the

92 missing class name (a string) as its only argument. Strings passed to

93 `on_missing` are lowercased automatically.

94 

95 :param handle_readonly: If set to True, the Unpickler will correctly restore

96 objects that were encoded with ``handle_readonly=True``. Do not set this flag

97 for objects that were not encoded with ``handle_readonly=True``.

98 

99 :param handler_context: 

100 Pass custom context to a custom handler. This can be used to customize 

101 behavior at runtime based on the data being decoded. Defaults to ``None``. An example can

102 be found in the examples/ directory on GitHub. 

103 

104 >>> decode('"my string"') == 'my string' 

105 True 

106 >>> decode('36') 

107 36 

108 """ 

109 

110 if isinstance(on_missing, str): 

111 on_missing = on_missing.lower() 

112 elif not util._is_function(on_missing): 

113 warnings.warn( 

114 "Unpickler.on_missing must be a string or a function! It will be ignored!" 

115 ) 

116 

117 backend = backend or json 

118 is_ephemeral_context = context is None 

119 context = context or Unpickler( 

120 keys=keys, 

121 backend=backend, 

122 safe=safe, 

123 v1_decode=v1_decode, 

124 on_missing=on_missing, 

125 handle_readonly=handle_readonly, 

126 handler_context=handler_context, 

127 ) 

128 if handler_context is not None: 

129 context.handler_context = handler_context 

130 data = backend.decode(string) 

131 result = context.restore(data, reset=reset, classes=classes) 

132 if is_ephemeral_context: 

133 # Avoid holding onto references to external objects, which can 

134 # prevent garbage collection from occurring.

135 context.reset() 

136 return result 
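
A minimal usage sketch of the options documented above; `Point` and the payload are illustrative and not part of this module:

import jsonpickle

class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y

payload = jsonpickle.encode(Point(1, 2))
# Make the local class available explicitly and warn (rather than fail
# silently) if any other class in the payload cannot be found.
restored = jsonpickle.decode(payload, classes=Point, on_missing="warn")
assert (restored.x, restored.y) == (1, 2)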

137 

138 

139def _safe_hasattr(obj: Any, attr: str) -> bool: 

140 """Workaround unreliable hasattr() availability on sqlalchemy objects""" 

141 try: 

142 object.__getattribute__(obj, attr) 

143 return True 

144 except AttributeError: 

145 return False 

146 

147 

148def _is_json_key(key: Any) -> bool: 

149 """Has this key a special object that has been encoded to JSON?""" 

150 return isinstance(key, str) and key.startswith(tags.JSON_KEY) 

151 

152 

153class _Proxy: 

154 """Proxies are dummy objects that are later replaced by real instances 

155 

156 The `restore()` function has to solve a tricky problem when restoring

157 objects with cyclical references -- the parent instance does not yet 

158 exist. 

159 

160 The problem is that `__getnewargs__()`, `__getstate__()`, custom handlers, 

161 and cyclical object graphs are allowed to reference the yet-to-be-created

162 object via the referencing machinery. 

163 

164 In other words, objects are allowed to depend on themselves for 

165 construction! 

166 

167 We solve this problem by placing dummy Proxy objects into the referencing 

168 machinery so that we can construct the child objects before constructing 

169 the parent. Objects are initially created with Proxy attribute values 

170 instead of real references. 

171 

172 We collect all objects that contain references to proxies and run 

173 a final sweep over them to swap in the real instance. This is done 

174 at the very end of the top-level `restore()`. 

175 

176 The `instance` attribute below is replaced with the real instance 

177 after `__new__()` has been used to construct the object and is used 

178 when swapping proxies with real instances. 

179 

180 """ 

181 

182 def __init__(self) -> None: 

183 self.instance = None 

184 

185 def get(self) -> Any: 

186 return self.instance 

187 

188 def reset(self, instance: Any) -> None: 

189 self.instance = instance 

190 

191 

192class _IDProxy(_Proxy): 

193 def __init__(self, objs: List[Any], index: int) -> None: 

194 self._index = index 

195 self._objs = objs 

196 

197 def get(self) -> Any: 

198 try: 

199 return self._objs[self._index] 

200 except IndexError: 

201 return None 

202 

203 

204def _obj_setattr(obj: Any, attr: str, proxy: _Proxy) -> None: 

205 """Use setattr to update a proxy entry""" 

206 setattr(obj, attr, proxy.get()) 

207 

208 

209def _obj_setvalue(obj: Any, idx: Any, proxy: _Proxy) -> None: 

210 """Use obj[key] assignments to update a proxy entry""" 

211 obj[idx] = proxy.get() 
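
A condensed illustration of the proxy-swap pattern described in the `_Proxy` docstring, using the helpers defined above; the container and values are made up. The real `Unpickler` records `(obj, attr_or_index, proxy, setter)` tuples in `self._proxies` and replays them in `_swap_proxies()`:

proxy = _Proxy()
container = ["placeholder", proxy]            # child built before its parent exists
pending = [(container, 1, proxy, _obj_setvalue)]

real_instance = object()                      # the parent, constructed later
proxy.reset(real_instance)                    # the proxy now resolves to it

for obj_, key, prx, setter in pending:        # what _swap_proxies() does
    setter(obj_, key, prx)

assert container[1] is real_instance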

212 

213 

214def has_tag(obj: Any, tag: str) -> bool: 

215 """Helper class that tests to see if the obj is a dictionary 

216 and contains a particular key/tag. 

217 

218 >>> obj = {'test': 1} 

219 >>> has_tag(obj, 'test') 

220 True 

221 >>> has_tag(obj, 'fail') 

222 False 

223 

224 >>> has_tag(42, 'fail') 

225 False 

226 

227 """ 

228 return type(obj) is dict and tag in obj 

229 

230 

231def getargs(obj: Dict[str, Any], classes: Optional[Dict[str, Type[Any]]] = None) -> Any: 

232 """Return arguments suitable for __new__()""" 

233 # Let saved newargs take precedence over everything 

234 if has_tag(obj, tags.NEWARGSEX): 

235 raise ValueError("__newargs_ex__ returns both args and kwargs") 

236 

237 if has_tag(obj, tags.NEWARGS): 

238 return obj[tags.NEWARGS] 

239 

240 if has_tag(obj, tags.INITARGS): 

241 return obj[tags.INITARGS] 

242 

243 try: 

244 seq_list = obj[tags.SEQ] 

245 obj_dict = obj[tags.OBJECT] 

246 except KeyError: 

247 return [] 

248 typeref = util.loadclass(obj_dict, classes=classes) 

249 if not typeref: 

250 return [] 

251 if hasattr(typeref, "_fields"): 

252 if len(typeref._fields) == len(seq_list): 

253 return seq_list 

254 return [] 
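
A quick sketch of the precedence implemented above, using hand-written payload dicts and the tag constants imported at the top of this module:

assert getargs({tags.NEWARGS: [1, 2, 3]}) == [1, 2, 3]   # saved newargs win
assert getargs({}) == []                                  # nothing usable -> no args
try:
    getargs({tags.NEWARGSEX: [[1], {"x": 2}]})            # args + kwargs is rejected
except ValueError:
    pass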

255 

256 

257class _trivialclassic: 

258 """ 

259 A trivial class that can be instantiated with no args 

260 """ 

261 

262 

263def make_blank_classic(cls: Type[Any]) -> Any: 

264 """ 

265 Implement the mandated strategy for dealing with classic classes 

266 which cannot be instantiated without __getinitargs__ because they 

267 take parameters 

268 """ 

269 instance = _trivialclassic() 

270 instance.__class__ = cls 

271 return instance 
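
For illustration, `make_blank_classic` builds an instance without ever calling `__init__`; `Legacy` is a hypothetical class:

class Legacy:
    def __init__(self, required_arg):
        self.required_arg = required_arg

blank = make_blank_classic(Legacy)
assert isinstance(blank, Legacy)
assert not hasattr(blank, "required_arg")   # __init__ never ran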

272 

273 

274def loadrepr(reprstr: str) -> Any: 

275 """Returns an instance of the object from the object's repr() string. 

276 This requires dynamically evaluating code.

277 

278 .. warning:: 

279 

280 This function is unsafe and uses `eval()`. 

281 

282 >>> obj = loadrepr('datetime/datetime.datetime.now()') 

283 >>> obj.__class__.__name__ 

284 'datetime' 

285 

286 """ 

287 module, evalstr = reprstr.split("/") 

288 mylocals = locals() 

289 localname = module 

290 if "." in localname: 

291 localname = module.split(".", 1)[0] 

292 mylocals[localname] = __import__(module) 

293 return eval(evalstr, mylocals) 

294 

295 

296def _loadmodule(module_str: str) -> Optional[Any]: 

297 """Returns a reference to a module. 

298 

299 >>> fn = _loadmodule('datetime/datetime.datetime.fromtimestamp') 

300 >>> fn.__name__ 

301 'fromtimestamp' 

302 

303 """ 

304 module, identifier = module_str.split("/") 

305 try: 

306 result = __import__(module) 

307 except ImportError: 

308 return None 

309 identifier_parts = identifier.split(".") 

310 first_identifier = identifier_parts[0] 

311 if first_identifier != module and not module.startswith(f"{first_identifier}."): 

312 return None 

313 for name in identifier_parts[1:]: 

314 try: 

315 result = getattr(result, name) 

316 except AttributeError: 

317 return None 

318 return result 

319 

320 

321def has_tag_dict(obj: Any, tag: str) -> bool: 

322 """Helper class that tests to see if the obj is a dictionary 

323 and contains a particular key/tag. 

324 

325 >>> obj = {'test': 1} 

326 >>> has_tag(obj, 'test') 

327 True 

328 >>> has_tag(obj, 'fail') 

329 False 

330 

331 >>> has_tag(42, 'fail') 

332 False 

333 

334 """ 

335 return tag in obj 

336 

337 

338def _passthrough(value: Any) -> Any: 

339 """A function that returns its input as-is""" 

340 return value 

341 

342 

343class Unpickler: 

344 def __init__( 

345 self, 

346 backend: Optional[JSONBackend] = None, 

347 keys: bool = False, 

348 safe: bool = True, 

349 v1_decode: bool = False, 

350 on_missing: MissingHandler = "ignore", 

351 handle_readonly: bool = False, 

352 handler_context: Any = None, 

353 ) -> None: 

354 self.backend = backend or json 

355 self.keys = keys 

356 self.safe = safe 

357 self.v1_decode = v1_decode 

358 self.on_missing = on_missing 

359 self.handle_readonly = handle_readonly 

360 # Custom context passed through to custom handlers, see #452 

361 self.handler_context = handler_context 

362 

363 self.reset() 

364 

365 def reset(self) -> None: 

366 """Resets the object's internal state.""" 

367 # Map reference names to object instances 

368 self._namedict = {} 

369 

370 # The stack of names traversed for child objects 

371 self._namestack = [] 

372 

373 # Map of objects to their index in the _objs list 

374 self._obj_to_idx = {} 

375 self._objs = [] 

376 self._proxies = [] 

377 

378 # Extra local classes not accessible globally 

379 self._classes = {} 

380 

381 def _swap_proxies(self) -> None: 

382 """Replace proxies with their corresponding instances""" 

383 for obj, attr, proxy, method in self._proxies: 

384 method(obj, attr, proxy) 

385 self._proxies = [] 

386 

387 def _restore( 

388 self, obj: Any, _passthrough: Callable[[Any], Any] = _passthrough 

389 ) -> Any: 

390 # if obj isn't one of these types, neither it nor anything in it can have a tag

391 # don't change the tuple of types to a set, it won't work with isinstance 

392 if not isinstance(obj, (str, list, dict, set, tuple)): 

393 restore = _passthrough 

394 else: 

395 restore = self._restore_tags(obj) 

396 return restore(obj) 

397 

398 def restore( 

399 self, obj: Any, reset: bool = True, classes: Optional[ClassesType] = None 

400 ) -> Any: 

401 """Restores a flattened object to its original python state. 

402 

403 Simply returns any of the basic builtin types 

404 

405 >>> u = Unpickler() 

406 >>> u.restore('hello world') == 'hello world' 

407 True 

408 >>> u.restore({'key': 'value'}) == {'key': 'value'} 

409 True 

410 

411 """ 

412 if reset: 

413 self.reset() 

414 if classes: 

415 self.register_classes(classes) 

416 value = self._restore(obj) 

417 if reset: 

418 self._swap_proxies() 

419 return value 
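
Besides plain builtins, `restore` rebuilds tagged containers; the payload dicts below are hand-written for illustration using the tag constants imported at the top of the module:

u = Unpickler()
assert u.restore({tags.TUPLE: [1, 2, 3]}) == (1, 2, 3)
assert u.restore({tags.SET: [1, 2, 3]}) == {1, 2, 3}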

420 

421 def register_classes(self, classes: ClassesType) -> None: 

422 """Register one or more classes 

423 

424 :param classes: sequence of classes or a single class to register 

425 

426 """ 

427 if isinstance(classes, (list, tuple, set)): 

428 for cls in classes: 

429 self.register_classes(cls) 

430 elif isinstance(classes, dict): 

431 self._classes.update( 

432 ( 

433 cls if isinstance(cls, str) else util.importable_name(cls), 

434 handler, 

435 ) 

436 for cls, handler in classes.items() 

437 ) 

438 else: 

439 self._classes[util.importable_name(classes)] = classes # type: ignore[arg-type] 
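
A short sketch of the three accepted forms; `LocalA` and `LocalB` stand in for classes that are not importable from the global module scope, and the dotted name in the dict form is hypothetical:

class LocalA:
    pass

class LocalB:
    pass

u = Unpickler()
u.register_classes(LocalA)                           # single class
u.register_classes([LocalA, LocalB])                 # sequence of classes
u.register_classes({"some.module.Renamed": LocalA})  # encoded name -> class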

440 

441 def _restore_base64(self, obj: Dict[str, Any]) -> bytes: 

442 try: 

443 return util.b64decode(obj[tags.B64].encode("utf-8")) 

444 except (AttributeError, UnicodeEncodeError): 

445 return b"" 

446 

447 def _restore_base85(self, obj: Dict[str, Any]) -> bytes: 

448 try: 

449 return util.b85decode(obj[tags.B85].encode("utf-8")) 

450 except (AttributeError, UnicodeEncodeError): 

451 return b"" 
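
For reference, binary payloads arrive as base64/base85 tagged strings; `'aGVsbG8='` is the base64 encoding of `b'hello'`, and the payload dict is hand-written for illustration:

u = Unpickler()
assert u.restore({tags.B64: "aGVsbG8="}) == b"hello"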

452 

453 def _refname(self) -> str: 

454 """Calculates the name of the current location in the JSON stack. 

455 

456 This is called as jsonpickle traverses the object structure to 

457 create references to previously-traversed objects. This allows 

458 cyclical data structures such as doubly-linked lists. 

459 jsonpickle ensures that duplicate python references to the same 

460 object result in only a single JSON object definition and

461 special reference tags to represent each reference. 

462 

463 >>> u = Unpickler() 

464 >>> u._namestack = [] 

465 >>> u._refname() == '/' 

466 True 

467 >>> u._namestack = ['a'] 

468 >>> u._refname() == '/a' 

469 True 

470 >>> u._namestack = ['a', 'b'] 

471 >>> u._refname() == '/a/b' 

472 True 

473 

474 """ 

475 return "/" + "/".join(self._namestack) 

476 

477 def _mkref(self, obj: Any) -> Any: 

478 obj_id = id(obj) 

479 try: 

480 _ = self._obj_to_idx[obj_id] 

481 except KeyError: 

482 self._obj_to_idx[obj_id] = len(self._objs) 

483 self._objs.append(obj) 

484 # Backwards compatibility: old versions of jsonpickle 

485 # produced "py/ref" references. 

486 self._namedict[self._refname()] = obj 

487 return obj 

488 

489 def _restore_list(self, obj: List[Any]) -> List[Any]: 

490 parent = [] 

491 self._mkref(parent) 

492 children = [self._restore(v) for v in obj] 

493 parent.extend(children) 

494 method = _obj_setvalue 

495 proxies = [ 

496 (parent, idx, value, method) 

497 for idx, value in enumerate(parent) 

498 if isinstance(value, _Proxy) 

499 ] 

500 self._proxies.extend(proxies) 

501 return parent 

502 

503 def _restore_iterator(self, obj: Dict[str, Any]) -> Iterator[Any]: 

504 try: 

505 return iter(self._restore_list(obj[tags.ITERATOR])) 

506 except TypeError: 

507 return iter([]) 

508 

509 def _swapref(self, proxy: _Proxy, instance: Any) -> None: 

510 proxy_id = id(proxy) 

511 instance_id = id(instance) 

512 

513 instance_index = self._obj_to_idx[proxy_id] 

514 self._obj_to_idx[instance_id] = instance_index 

515 del self._obj_to_idx[proxy_id] 

516 

517 self._objs[instance_index] = instance 

518 self._namedict[self._refname()] = instance 

519 

520 def _restore_reduce(self, obj: Dict[str, Any]) -> Any: 

521 """ 

522 Supports restoring with all elements of __reduce__ as per pep 307. 

523 Assumes that iterator items (the last two) are represented as lists 

524 as per pickler implementation. 

525 """ 

526 proxy = _Proxy() 

527 self._mkref(proxy) 

528 try: 

529 reduce_val = list(map(self._restore, obj[tags.REDUCE])) 

530 except TypeError: 

531 result = [] 

532 proxy.reset(result) 

533 self._swapref(proxy, result) 

534 return result 

535 if len(reduce_val) < 5: 

536 reduce_val.extend([None] * (5 - len(reduce_val))) 

537 f, args, state, listitems, dictitems = reduce_val 

538 

539 if f == tags.NEWOBJ or getattr(f, "__name__", "") == "__newobj__": 

540 # mandated special case 

541 cls = args[0] 

542 if not isinstance(cls, type): 

543 cls = self._restore(cls) 

544 stage1 = cls.__new__(cls, *args[1:]) 

545 else: 

546 if not callable(f): 

547 result = [] 

548 proxy.reset(result) 

549 self._swapref(proxy, result) 

550 return result 

551 try: 

552 stage1 = f(*args) 

553 except TypeError: 

554 # this happens when kwargs are missing or args don't match, so we bypass

555 # __init__; the state dict will set all attributes immediately afterwards

556 stage1 = f.__new__(f, *args) 

557 

558 if state: 

559 try: 

560 stage1.__setstate__(state) 

561 except AttributeError: 

562 # it's fine - we'll try the prescribed default methods 

563 try: 

564 # we can't do a straight update here because we 

565 # need object identity of the state dict to be 

566 # preserved so that _swap_proxies works out 

567 for k, v in stage1.__dict__.items(): 

568 state.setdefault(k, v) 

569 stage1.__dict__ = state 

570 except AttributeError: 

571 # next prescribed default 

572 try: 

573 for k, v in state.items(): 

574 setattr(stage1, k, v) 

575 except Exception: 

576 dict_state, slots_state = state 

577 if dict_state: 

578 stage1.__dict__.update(dict_state) 

579 if slots_state: 

580 for k, v in slots_state.items(): 

581 setattr(stage1, k, v) 

582 

583 if listitems: 

584 # should be lists if not None 

585 try: 

586 stage1.extend(listitems) 

587 except AttributeError: 

588 for x in listitems: 

589 stage1.append(x) 

590 

591 if dictitems: 

592 for k, v in dictitems: 

593 stage1.__setitem__(k, v) 

594 

595 proxy.reset(stage1) 

596 self._swapref(proxy, stage1) 

597 return stage1 
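
A simplified, stand-alone restatement of the reduce protocol applied above (PEP 307). This sketch omits the proxy bookkeeping, the `__newobj__` special case and the layered state fallbacks; it only shows the order of operations:

def _apply_reduce_sketch(f, args, state=None, listitems=None, dictitems=None):
    obj = f(*args)                        # 1. rebuild the bare object
    if state is not None:
        if hasattr(obj, "__setstate__"):  # 2. hand the saved state back
            obj.__setstate__(state)
        else:
            obj.__dict__.update(state)
    for item in listitems or []:          # 3. replay appended items
        obj.append(item)
    for k, v in dictitems or []:          # 4. replay mapping items
        obj[k] = v
    return obj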

598 

599 def _restore_id(self, obj: Dict[str, Any]) -> Any: 

600 try: 

601 idx = obj[tags.ID] 

602 return self._objs[idx] 

603 except IndexError: 

604 return _IDProxy(self._objs, idx) 

605 except TypeError: 

606 return None 

607 

608 def _restore_type(self, obj: Dict[str, Any]) -> Any: 

609 typeref = util.loadclass(obj[tags.TYPE], classes=self._classes) 

610 if typeref is None: 

611 return obj 

612 return typeref 

613 

614 def _restore_module(self, obj: Dict[str, Any]) -> Any: 

615 new_obj = _loadmodule(obj[tags.MODULE]) 

616 return self._mkref(new_obj) 

617 

618 def _restore_repr_safe(self, obj: Dict[str, Any]) -> Any: 

619 new_obj = _loadmodule(obj[tags.REPR]) 

620 return self._mkref(new_obj) 

621 

622 def _restore_repr(self, obj: Dict[str, Any]) -> Any: 

623 obj = loadrepr(obj[tags.REPR]) 

624 return self._mkref(obj) 

625 

626 def _loadfactory(self, obj: Dict[str, Any]) -> Optional[Any]: 

627 default_factory = None 

628 for key in (tags.DEFAULT_FACTORY, "default_factory"): 

629 try: 

630 default_factory = obj.pop(key) 

631 break 

632 except KeyError: 

633 continue 

634 if default_factory is None: 

635 return None 

636 return self._restore(default_factory) 

637 

638 def _process_missing(self, class_name: str) -> None: 

639 # most common case comes first 

640 if self.on_missing == "ignore": 

641 pass 

642 elif self.on_missing == "warn": 

643 warnings.warn("Unpickler._restore_object could not find %s!" % class_name) 

644 elif self.on_missing == "error": 

645 raise errors.ClassNotFoundError( 

646 "Unpickler.restore_object could not find %s!" % class_name # type: ignore[arg-type] 

647 ) 

648 elif util._is_function(self.on_missing): 

649 self.on_missing(class_name) # type: ignore[operator] 

650 

651 def _restore_pickled_key(self, key: str) -> Any: 

652 """Restore a possibly pickled key""" 

653 if _is_json_key(key): 

654 key = decode( 

655 key[len(tags.JSON_KEY) :], 

656 backend=self.backend, 

657 context=self, 

658 keys=True, 

659 reset=False, 

660 ) 

661 return key 

662 

663 def _restore_key_fn( 

664 self, _passthrough: Callable[[Any], Any] = _passthrough 

665 ) -> Callable[[Any], Any]: 

666 """Return a callable that restores keys 

667 

668 This function is responsible for restoring non-string keys 

669 when we are decoding with `keys=True`. 

670 

671 """ 

672 # This function is called before entering a tight loop 

673 # where the returned function will be called. 

674 # We return a specific function after checking self.keys 

675 # instead of doing so in the body of the function to 

676 # avoid conditional branching inside a tight loop. 

677 if self.keys: 

678 restore_key = self._restore_pickled_key 

679 else: 

680 restore_key = _passthrough # type: ignore[assignment] 

681 return restore_key 

682 

683 def _restore_from_dict( 

684 self, 

685 obj: Dict[str, Any], 

686 instance: Any, 

687 ignorereserved: bool = True, 

688 restore_dict_items: bool = True, 

689 ) -> Any: 

690 restore_key = self._restore_key_fn() 

691 method = _obj_setattr 

692 deferred = {} 

693 

694 for k, v in util.items(obj): 

695 # ignore the reserved attribute 

696 if ignorereserved and k in tags.RESERVED: 

697 continue 

698 if isinstance(k, (int, float)): 

699 str_k = k.__str__() 

700 else: 

701 str_k = k 

702 self._namestack.append(str_k) 

703 if restore_dict_items: 

704 k = restore_key(k) 

705 # step into the namespace 

706 value = self._restore(v) 

707 else: 

708 value = v 

709 if util._is_noncomplex(instance) or util._is_dictionary_subclass(instance): 

710 try: 

711 if k == "__dict__": 

712 setattr(instance, k, value) 

713 else: 

714 instance[k] = value 

715 except TypeError: 

716 # Immutable object, must be constructed in one shot 

717 if k != "__dict__": 

718 deferred[k] = value 

719 self._namestack.pop() 

720 continue 

721 else: 

722 if not k.startswith("__"): 

723 try: 

724 setattr(instance, k, value) 

725 except KeyError: 

726 # certain numpy objects require us to prepend an underscore to the attribute name

727 # this arguably belongs in the numpy handler, but it could be

728 # useful for other code as well

729 setattr(instance, f"_{k}", value) 

730 except dataclasses.FrozenInstanceError: 

731 # issue #240 

732 # i think this is the only way to set frozen dataclass attrs 

733 object.__setattr__(instance, k, value) 

734 except AttributeError as e: 

735 # some objects raise this for read-only attributes (#422) (#478) 

736 if ( 

737 hasattr(instance, "__slots__") 

738 and not len(instance.__slots__) 

739 # we have to handle this separately because of #483

740 and issubclass(instance.__class__, (int, str)) 

741 and self.handle_readonly 

742 ): 

743 continue 

744 raise e 

745 else: 

746 setattr(instance, f"_{instance.__class__.__name__}{k}", value) 

747 

748 # This instance has an instance variable named `k` that is 

749 # currently a proxy and must be replaced 

750 if isinstance(value, _Proxy): 

751 self._proxies.append((instance, k, value, method)) 

752 

753 # step out 

754 self._namestack.pop() 

755 

756 if deferred: 

757 # SQLAlchemy Immutable mappings must be constructed in one shot 

758 instance = instance.__class__(deferred) 

759 

760 return instance 

761 

762 def _restore_state(self, obj: Dict[str, Any], instance: Any) -> Any: 

763 state = self._restore(obj[tags.STATE]) 

764 has_slots = ( 

765 isinstance(state, tuple) and len(state) == 2 and isinstance(state[1], dict) 

766 ) 

767 has_slots_and_dict = has_slots and isinstance(state[0], dict) 

768 if hasattr(instance, "__setstate__"): 

769 instance.__setstate__(state) 

770 elif isinstance(state, dict): 

771 # implements described default handling 

772 # of state for object with instance dict 

773 # and no slots 

774 instance = self._restore_from_dict( 

775 state, instance, ignorereserved=False, restore_dict_items=False 

776 ) 

777 elif has_slots: 

778 instance = self._restore_from_dict( 

779 state[1], instance, ignorereserved=False, restore_dict_items=False 

780 ) 

781 if has_slots_and_dict: 

782 instance = self._restore_from_dict( 

783 state[0], instance, ignorereserved=False, restore_dict_items=False 

784 ) 

785 elif not hasattr(instance, "__getnewargs__") and not hasattr( 

786 instance, "__getnewargs_ex__" 

787 ): 

788 # __setstate__ is not implemented so that means that the best 

789 # we can do is return the result of __getstate__() rather than 

790 # return an empty shell of an object. 

791 # However, if there were newargs, it's not an empty shell 

792 instance = state 

793 return instance 
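
A round trip through the `__setstate__` branch above, shown with the public API and a hypothetical class (run outside this module):

import jsonpickle

class Tally:
    def __init__(self):
        self.count = 0
        self.cache = {}                    # transient, not worth persisting

    def __getstate__(self):
        return {"count": self.count}

    def __setstate__(self, state):
        self.count = state["count"]
        self.cache = {}

t = Tally()
t.count = 5
restored = jsonpickle.decode(jsonpickle.encode(t))
assert restored.count == 5 and restored.cache == {}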

794 

795 def _restore_object_instance_variables( 

796 self, obj: Dict[str, Any], instance: Any 

797 ) -> Any: 

798 instance = self._restore_from_dict(obj, instance) 

799 

800 # Handle list and set subclasses 

801 if has_tag(obj, tags.SEQ): 

802 if hasattr(instance, "append"): 

803 for v in obj[tags.SEQ]: 

804 instance.append(self._restore(v)) 

805 elif hasattr(instance, "add"): 

806 for v in obj[tags.SEQ]: 

807 instance.add(self._restore(v)) 

808 

809 if has_tag(obj, tags.STATE): 

810 instance = self._restore_state(obj, instance) 

811 

812 return instance 

813 

814 def _restore_object_instance( 

815 self, obj: Dict[str, Any], cls: Type[Any], class_name: str = "" 

816 ) -> Any: 

817 # This is a placeholder proxy object which allows child objects to 

818 # reference the parent object before it has been instantiated. 

819 proxy = _Proxy() 

820 self._mkref(proxy) 

821 

822 # An object can install itself as its own factory, so load the factory 

823 # after the instance is available for referencing. 

824 factory = self._loadfactory(obj) 

825 

826 if has_tag(obj, tags.NEWARGSEX): 

827 args, kwargs = obj[tags.NEWARGSEX] 

828 else: 

829 args = getargs(obj, classes=self._classes) 

830 kwargs = {} 

831 if args: 

832 args = self._restore(args) 

833 if kwargs: 

834 kwargs = self._restore(kwargs) 

835 

836 is_oldstyle = not (isinstance(cls, type) or getattr(cls, "__meta__", None)) 

837 try: 

838 if not is_oldstyle and hasattr(cls, "__new__"): 

839 # new style classes 

840 if factory: 

841 instance = cls.__new__(cls, factory, *args, **kwargs) 

842 instance.default_factory = factory 

843 else: 

844 instance = cls.__new__(cls, *args, **kwargs) 

845 else: 

846 instance = object.__new__(cls) 

847 except TypeError: # old-style classes 

848 is_oldstyle = True 

849 

850 if is_oldstyle: 

851 try: 

852 instance = cls(*args) 

853 except TypeError: # fail gracefully 

854 try: 

855 instance = make_blank_classic(cls) 

856 except Exception: # fail gracefully 

857 self._process_missing(class_name) 

858 return self._mkref(obj) 

859 

860 proxy.reset(instance) 

861 self._swapref(proxy, instance) 

862 

863 if isinstance(instance, tuple): 

864 return instance 

865 

866 instance = self._restore_object_instance_variables(obj, instance) 

867 

868 if _safe_hasattr(instance, "default_factory") and isinstance( 

869 instance.default_factory, _Proxy 

870 ): 

871 instance.default_factory = instance.default_factory.get() 

872 

873 return instance 

874 

875 def _restore_object(self, obj: Dict[str, Any]) -> Any: 

876 class_name = obj[tags.OBJECT] 

877 cls = util.loadclass(class_name, classes=self._classes) 

878 handler = handlers.get(cls, handlers.get(class_name)) # type: ignore[arg-type] 

879 if handler is not None: # custom handler 

880 proxy = _Proxy() 

881 self._mkref(proxy) 

882 handler_instance = handler(self) 

883 instance = self._call_handler_restore(handler_instance, obj) 

884 proxy.reset(instance) 

885 self._swapref(proxy, instance) 

886 return instance 

887 

888 if cls is None: 

889 self._process_missing(class_name) 

890 return self._mkref(obj) 

891 

892 return self._restore_object_instance(obj, cls, class_name) 

893 

894 def _restore_function(self, obj: Dict[str, Any]) -> Any: 

895 return util.loadclass(obj[tags.FUNCTION], classes=self._classes) 

896 

897 def _restore_set(self, obj: Dict[str, Any]) -> Set[Any]: 

898 try: 

899 return {self._restore(v) for v in obj[tags.SET]} 

900 except TypeError: 

901 return set() 

902 

903 def _restore_dict(self, obj: Dict[str, Any]) -> Dict[str, Any]: 

904 data = {} 

905 if not self.v1_decode: 

906 self._mkref(data) 

907 

908 # If we are decoding dicts that can have non-string keys then we 

909 # need to do a two-phase decode where the non-string keys are 

910 # processed last. This ensures a deterministic order when 

911 # assigning object IDs for references. 

912 if self.keys: 

913 # Phase 1: regular non-special keys. 

914 for k, v in util.items(obj): 

915 if _is_json_key(k): 

916 continue 

917 if isinstance(k, (int, float)): 

918 str_k = k.__str__() 

919 else: 

920 str_k = k 

921 self._namestack.append(str_k) 

922 data[k] = self._restore(v) 

923 

924 self._namestack.pop() 

925 

926 # Phase 2: object keys only. 

927 for k, v in util.items(obj): 

928 if not _is_json_key(k): 

929 continue 

930 self._namestack.append(k) 

931 

932 k = self._restore_pickled_key(k) 

933 data[k] = result = self._restore(v) 

934 # k is currently a proxy and must be replaced 

935 if isinstance(result, _Proxy): 

936 self._proxies.append((data, k, result, _obj_setvalue)) 

937 

938 self._namestack.pop() 

939 else: 

940 # No special keys, thus we don't need to restore the keys either. 

941 for k, v in util.items(obj): 

942 if isinstance(k, (int, float)): 

943 str_k = k.__str__() 

944 else: 

945 str_k = k 

946 self._namestack.append(str_k) 

947 data[k] = result = self._restore(v) 

948 if isinstance(result, _Proxy): 

949 self._proxies.append((data, k, result, _obj_setvalue)) 

950 self._namestack.pop() 

951 return data 
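
The two-phase handling above only matters when decoding with `keys=True`, which round-trips non-string dictionary keys; a public-API example (run outside this module):

import jsonpickle

original = {1: "one", (2, 3): "pair"}
encoded = jsonpickle.encode(original, keys=True)
assert jsonpickle.decode(encoded, keys=True) == original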

952 

953 def _restore_tuple(self, obj: Dict[str, Any]) -> Tuple[Any, ...]: 

954 try: 

955 return tuple(self._restore(v) for v in obj[tags.TUPLE]) 

956 except TypeError: 

957 return () 

958 

959 def _restore_tags( 

960 self, obj: Any, _passthrough: Callable[[Any], Any] = _passthrough 

961 ) -> Callable[[Any], Any]: 

962 """Return the restoration function for the specified object""" 

963 try: 

964 if not tags.RESERVED <= set(obj) and type(obj) not in (list, dict): 

965 return _passthrough 

966 except TypeError: 

967 pass 

968 if type(obj) is dict: 

969 if tags.TUPLE in obj: 

970 restore = self._restore_tuple 

971 elif tags.SET in obj: 

972 restore = self._restore_set # type: ignore[assignment] 

973 elif tags.B64 in obj: 

974 restore = self._restore_base64 # type: ignore[assignment] 

975 elif tags.B85 in obj: 

976 restore = self._restore_base85 # type: ignore[assignment] 

977 elif tags.ID in obj: 

978 restore = self._restore_id 

979 elif tags.ITERATOR in obj: 

980 restore = self._restore_iterator # type: ignore[assignment] 

981 elif tags.OBJECT in obj: 

982 restore = self._restore_object 

983 elif tags.TYPE in obj: 

984 restore = self._restore_type 

985 elif tags.REDUCE in obj: 

986 restore = self._restore_reduce 

987 elif tags.FUNCTION in obj: 

988 restore = self._restore_function 

989 elif tags.MODULE in obj: 

990 restore = self._restore_module 

991 elif tags.REPR in obj: 

992 if self.safe: 

993 restore = self._restore_repr_safe 

994 else: 

995 restore = self._restore_repr 

996 else: 

997 restore = self._restore_dict # type: ignore[assignment] 

998 elif type(obj) is list: 

999 restore = self._restore_list # type: ignore[assignment] 

1000 else: 

1001 restore = _passthrough # type: ignore[assignment] 

1002 return restore 

1003 

1004 def _call_handler_restore( 

1005 self, handler: handlers.BaseHandler, obj: Dict[str, Any] 

1006 ) -> Any: 

1007 kwargs: dict[str, Any] = {} 

1008 if ( 

1009 self.handler_context is not None 

1010 and handlers.handler_accepts_handler_context(handler.restore) 

1011 ): 

1012 kwargs["handler_context"] = self.handler_context 

1013 return handler.restore(obj, **kwargs)
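
A sketch of a custom handler whose `restore()` opts in to `handler_context`, which the method above forwards when the Unpickler was created with `handler_context=...`; `Money`, `MoneyHandler`, and the `scale` key are hypothetical:

from jsonpickle import handlers

class Money:
    def __init__(self, amount):
        self.amount = amount

class MoneyHandler(handlers.BaseHandler):
    def flatten(self, obj, data):
        data["amount"] = obj.amount
        return data

    def restore(self, obj, handler_context=None):
        # handler_context is only supplied when decode()/Unpickler()
        # received one; fall back to a neutral default otherwise.
        scale = (handler_context or {}).get("scale", 1)
        return Money(obj["amount"] * scale)

handlers.register(Money, MoneyHandler)
# decode(payload, handler_context={"scale": 100}) would then pass the
# dict through to MoneyHandler.restore().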