Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/jsonpickle/pickler.py: 11%

Shortcuts on this page

r m x   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

405 statements  

1# Copyright (C) 2008 John Paulett (john -at- paulett.org) 

2# Copyright (C) 2009-2024 David Aguilar (davvid -at- gmail.com) 

3# All rights reserved. 

4# 

5# This software is licensed as described in the file COPYING, which 

6# you should have received as part of this distribution. 

7import decimal 

8import inspect 

9import itertools 

10import sys 

11import types 

12import warnings 

13from itertools import chain, islice 

14from typing import Any, Callable, Dict, Iterable, List, Optional, Sequence, Type, Union 

15 

16from . import handlers, tags, util 

17from .backend import JSONBackend, json 

18 

19 

def encode(
    value: Any,
    unpicklable: bool = True,
    make_refs: bool = True,
    keys: bool = False,
    max_depth: Optional[int] = None,
    reset: bool = True,
    backend: Optional[JSONBackend] = None,
    warn: bool = False,
    context: Optional["Pickler"] = None,
    max_iter: Optional[int] = None,
    use_decimal: bool = False,
    numeric_keys: bool = False,
    use_base85: bool = False,
    fail_safe: Optional[Callable[[Exception], Any]] = None,
    indent: Optional[int] = None,
    separators: Optional[Any] = None,
    include_properties: bool = False,
    handle_readonly: bool = False,
) -> str:
    """Return a JSON formatted representation of value, a Python object.

    :param unpicklable: If set to ``False`` then the output will not contain the
        information necessary to turn the JSON data back into Python objects,
        but a simpler JSON stream is produced. It's recommended to set this
        parameter to ``False`` when your code does not rely on two objects
        having the same ``id()`` value, and when it is sufficient for those two
        objects to be equal by ``==``, such as when serializing sklearn
        instances. If you experience (de)serialization being incorrect when you
        use numpy, pandas, or sklearn handlers, this should be set to ``False``.
        If you want the output to not include the dtype for numpy arrays, add::

            jsonpickle.register(
                numpy.generic, UnpicklableNumpyGenericHandler, base=True
            )

        before your pickling code.
    :param make_refs: If set to False jsonpickle's referencing support is
        disabled. Objects that are id()-identical won't be preserved across
        encode()/decode(), but the resulting JSON stream will be conceptually
        simpler. jsonpickle detects cyclical objects and will break the cycle
        by calling repr() instead of recursing when make_refs is set False.
    :param keys: If set to True then jsonpickle will encode non-string
        dictionary keys instead of coercing them into strings via `repr()`.
        This is typically what you want if you need to support Integer or
        objects as dictionary keys.
    :param max_depth: If set to a non-negative integer then jsonpickle will
        not recurse deeper than 'max_depth' steps into the object. Anything
        deeper than 'max_depth' is represented using a Python repr() of the
        object.
    :param reset: Custom pickle handlers that use the `Pickler.flatten` method or
        `jsonpickle.encode` function must call `encode` with `reset=False`
        in order to retain object references during pickling.
        This flag is not typically used outside of a custom handler or
        `__getstate__` implementation.
    :param backend: If set to an instance of jsonpickle.backend.JSONBackend,
        jsonpickle will use that backend for deserialization.
    :param warn: If set to True then jsonpickle will warn when it
        returns None for an object which it cannot pickle
        (e.g. file descriptors).
    :param context: Supply a pre-built Pickler or Unpickler object to the
        `jsonpickle.encode` and `jsonpickle.decode` machinery instead
        of creating a new instance. The `context` represents the currently
        active Pickler and Unpickler objects when custom handlers are
        invoked by jsonpickle.
    :param max_iter: If set to a non-negative integer then jsonpickle will
        consume at most `max_iter` items when pickling iterators.
    :param use_decimal: If set to True jsonpickle will allow Decimal
        instances to pass-through, with the assumption that the simplejson
        backend will be used in `use_decimal` mode. In order to use this mode
        you will need to configure simplejson::

            jsonpickle.set_encoder_options('simplejson',
                                           use_decimal=True, sort_keys=True)
            jsonpickle.set_decoder_options('simplejson',
                                           use_decimal=True)
            jsonpickle.set_preferred_backend('simplejson')

        NOTE: A side-effect of the above settings is that float values will be
        converted to Decimal when converting to json.
    :param numeric_keys: Only use this option if the backend supports integer
        dict keys natively. This flag tells jsonpickle to leave numeric keys
        as-is rather than conforming them to json-friendly strings.
        Using ``keys=True`` is the typical solution for integer keys, so only
        use this if you have a specific use case where you want to allow the
        backend to handle serialization of numeric dict keys.
    :param use_base85:
        If possible, use base85 to encode binary data. Base85 bloats binary data
        by 1/4 as opposed to base64, which expands it by 1/3. This argument is
        ignored on Python 2 because it doesn't support it.
    :param fail_safe: If set to a function exceptions are ignored when pickling
        and if an exception happens the function is called and the return value
        is used as the value for the object that caused the error
    :param indent: When `indent` is a non-negative integer, then JSON array
        elements and object members will be pretty-printed with that indent
        level. An indent level of 0 will only insert newlines. ``None`` is
        the most compact representation. Since the default item separator is
        ``(', ', ': ')``, the output might include trailing whitespace when
        ``indent`` is specified. You can use ``separators=(',', ': ')`` to
        avoid this. This value is passed directly to the active JSON backend
        library and not used by jsonpickle directly.
    :param separators:
        If ``separators`` is an ``(item_separator, dict_separator)`` tuple
        then it will be used instead of the default ``(', ', ': ')``
        separators. ``(',', ':')`` is the most compact JSON representation.
        This value is passed directly to the active JSON backend library and
        not used by jsonpickle directly.
    :param include_properties:
        Include the names and values of class properties in the generated json.
        Properties are unpickled properly regardless of this setting, this is
        meant to be used if processing the json outside of Python. Certain types
        such as sets will not pickle due to not having a native-json equivalent.
        Defaults to ``False``.
    :param handle_readonly:
        Handle objects with readonly methods, such as Django's SafeString. This
        basically prevents jsonpickle from raising an exception for such objects.
        You MUST set ``handle_readonly=True`` for the decoding if you encode with
        this flag set to ``True``.

    >>> encode('my string') == '"my string"'
    True
    >>> encode(36) == '36'
    True
    >>> encode({'foo': True}) == '{"foo": true}'
    True
    >>> encode({'foo': [1, 2, [3, 4]]}, max_depth=1)
    '{"foo": "[1, 2, [3, 4]]"}'

    """
    # Fall back to the module's default JSON backend when none is supplied.
    backend = backend or json
    # Reuse a caller-supplied Pickler (e.g. from a custom handler invoking
    # encode(reset=False)) so object references stay consistent; otherwise
    # build a fresh Pickler from the keyword options.
    context = context or Pickler(
        unpicklable=unpicklable,
        make_refs=make_refs,
        keys=keys,
        backend=backend,
        max_depth=max_depth,
        warn=warn,
        max_iter=max_iter,
        numeric_keys=numeric_keys,
        use_decimal=use_decimal,
        use_base85=use_base85,
        fail_safe=fail_safe,
        include_properties=include_properties,
        handle_readonly=handle_readonly,
        original_object=value,
    )
    # Flatten to json-safe primitives first, then let the backend serialize;
    # indent/separators are passed straight through to the backend library.
    return backend.encode(
        context.flatten(value, reset=reset), indent=indent, separators=separators
    )

169 

170 

def _in_cycle(
    obj: Any, objs: Dict[int, int], max_reached: bool, make_refs: bool
) -> bool:
    """Return True when flattening ``obj`` would lead to infinite recursion.

    A cycle is declared once the maximum depth is reached, or when
    referencing is disabled and the object's id() was already logged --
    unless the object is a primitive or an enum, which are safe to repeat.
    """
    if not (max_reached or (not make_refs and id(obj) in objs)):
        return False
    return not (util._is_primitive(obj) or util._is_enum(obj))

180 

181 

def _mktyperef(obj: Type[Any]) -> Dict[str, str]:
    """Build a ``py/type`` reference dictionary for a class object.

    >>> _mktyperef(AssertionError) == {'py/type': 'builtins.AssertionError'}
    True

    """
    dotted_name = util.importable_name(obj)
    return {tags.TYPE: dotted_name}

190 

191 

192def _wrap_string_slot(string: Union[str, Sequence[str]]) -> Sequence[str]: 

193 """Converts __slots__ = 'a' into __slots__ = ('a',)""" 

194 if isinstance(string, str): 

195 return (string,) 

196 return string 

197 

198 

199class Pickler: 

    def __init__(
        self,
        unpicklable: bool = True,
        make_refs: bool = True,
        max_depth: Optional[int] = None,
        backend: Optional[JSONBackend] = None,
        keys: bool = False,
        warn: bool = False,
        max_iter: Optional[int] = None,
        numeric_keys: bool = False,
        use_decimal: bool = False,
        use_base85: bool = False,
        fail_safe: Optional[Callable[[Exception], Any]] = None,
        include_properties: bool = False,
        handle_readonly: bool = False,
        original_object: Optional[Any] = None,
    ) -> None:
        """Configure a Pickler; the parameters mirror :func:`encode`."""
        self.unpicklable = unpicklable
        self.make_refs = make_refs
        self.backend = backend or json
        self.keys = keys
        self.warn = warn
        self.numeric_keys = numeric_keys
        self.use_base85 = use_base85
        # The current recursion depth
        self._depth = -1
        # The maximal recursion depth
        self._max_depth = max_depth
        # Maps id(obj) to reference IDs
        self._objs = {}
        # Avoids garbage collection
        self._seen = []
        # maximum amount of items to take from a pickled iterator
        self._max_iter = max_iter
        # Whether to allow decimals to pass-through
        self._use_decimal = use_decimal
        # A cache of objects that have already been flattened.
        self._flattened = {}
        # Used for util._is_readonly, see +483
        self.handle_readonly = handle_readonly

        # Choose the binary encoding (tag + encoder function) once up front.
        if self.use_base85:
            self._bytes_tag = tags.B85
            self._bytes_encoder = util.b85encode
        else:
            self._bytes_tag = tags.B64
            self._bytes_encoder = util.b64encode

        # ignore exceptions
        self.fail_safe = fail_safe
        self.include_properties = include_properties

        # The top-level value being encoded; the handle_readonly path
        # inspects its members when flattening key/value pairs.
        self._original_object = original_object

253 

254 def _determine_sort_keys(self) -> bool: 

255 for _, options in getattr(self.backend, "_encoder_options", {}).values(): 

256 if options.get("sort_keys", False): 

257 # the user has set one of the backends to sort keys 

258 return True 

259 return False 

260 

261 def _sort_attrs(self, obj: Any) -> Any: 

262 if hasattr(obj, "__slots__") and self.warn: 

263 # Slots are read-only by default, the only way 

264 # to sort keys is to do it in a subclass 

265 # and that would require calling the init function 

266 # of the parent again. That could cause issues 

267 # so we refuse to handle it. 

268 raise TypeError( 

269 "Objects with __slots__ cannot have their keys reliably sorted by " 

270 "jsonpickle! Please sort the keys in the __slots__ definition instead." 

271 ) 

272 # Somehow some classes don't have slots or dict 

273 elif hasattr(obj, "__dict__"): 

274 try: 

275 obj.__dict__ = dict(sorted(obj.__dict__.items())) 

276 except (TypeError, AttributeError): 

277 # Can't set attributes of builtin/extension type 

278 pass 

279 return obj 

280 

281 def reset(self) -> None: 

282 self._objs = {} 

283 self._depth = -1 

284 self._seen = [] 

285 self._flattened = {} 

286 

287 def _push(self) -> None: 

288 """Steps down one level in the namespace.""" 

289 self._depth += 1 

290 

291 def _pop(self, value: Any) -> Any: 

292 """Step up one level in the namespace and return the value. 

293 If we're at the root, reset the pickler's state. 

294 """ 

295 self._depth -= 1 

296 if self._depth == -1: 

297 self.reset() 

298 return value 

299 

300 def _log_ref(self, obj: Any) -> bool: 

301 """ 

302 Log a reference to an in-memory object. 

303 Return True if this object is new and was assigned 

304 a new ID. Otherwise return False. 

305 """ 

306 objid = id(obj) 

307 is_new = objid not in self._objs 

308 if is_new: 

309 new_id = len(self._objs) 

310 self._objs[objid] = new_id 

311 return is_new 

312 

313 def _mkref(self, obj: Any) -> bool: 

314 """ 

315 Log a reference to an in-memory object, and return 

316 if that object should be considered newly logged. 

317 """ 

318 is_new = self._log_ref(obj) 

319 # Pretend the object is new 

320 pretend_new = not self.unpicklable or not self.make_refs 

321 return pretend_new or is_new 

322 

323 def _getref(self, obj: Any) -> Dict[str, int]: 

324 """Return a "py/id" entry for the specified object""" 

325 return {tags.ID: self._objs.get(id(obj))} # type: ignore[dict-item] 

326 

327 def _flatten(self, obj: Any) -> Any: 

328 """Flatten an object and its guts into a json-safe representation""" 

329 if self.unpicklable and self.make_refs: 

330 result = self._flatten_impl(obj) 

331 else: 

332 try: 

333 result = self._flattened[id(obj)] 

334 except KeyError: 

335 result = self._flattened[id(obj)] = self._flatten_impl(obj) 

336 return result 

337 

    def flatten(self, obj: Any, reset: bool = True) -> Any:
        """Takes an object and returns a JSON-safe representation of it.

        Simply returns any of the basic builtin datatypes

        >>> p = Pickler()
        >>> p.flatten('hello world') == 'hello world'
        True
        >>> p.flatten(49)
        49
        >>> p.flatten(350.0)
        350.0
        >>> p.flatten(True)
        True
        >>> p.flatten(False)
        False
        >>> r = p.flatten(None)
        >>> r is None
        True
        >>> p.flatten(False)
        False
        >>> p.flatten([1, 2, 3, 4])
        [1, 2, 3, 4]
        >>> p.flatten((1,2,))[tags.TUPLE]
        [1, 2]
        >>> p.flatten({'key': 'value'}) == {'key': 'value'}
        True
        """
        if reset:
            self.reset()
        # When any backend encoder is configured with sort_keys, sort the
        # object's attributes up front before flattening.
        if self._determine_sort_keys():
            obj = self._sort_attrs(obj)
        return self._flatten(obj)

371 

372 def _flatten_bytestring(self, obj: bytes) -> Dict[str, str]: 

373 return {self._bytes_tag: self._bytes_encoder(obj)} 

374 

375 def _flatten_impl(self, obj: Any) -> Any: 

376 ######################################### 

377 # if obj is nonrecursive return immediately 

378 # for performance reasons we don't want to do recursive checks 

379 if type(obj) is bytes: 

380 return self._flatten_bytestring(obj) 

381 

382 # Decimal is a primitive when use_decimal is True 

383 if type(obj) in (str, bool, int, float, type(None)) or ( 

384 self._use_decimal and isinstance(obj, decimal.Decimal) 

385 ): 

386 return obj 

387 ######################################### 

388 

389 self._push() 

390 return self._pop(self._flatten_obj(obj)) 

391 

392 def _max_reached(self) -> bool: 

393 return self._depth == self._max_depth 

394 

395 def _pickle_warning(self, obj: Any) -> None: 

396 if self.warn: 

397 msg = "jsonpickle cannot pickle %r: replaced with None" % obj 

398 warnings.warn(msg) 

399 

    def _flatten_obj(self, obj: Any) -> Any:
        """Flatten a non-primitive object, honoring cycles and fail_safe."""
        # Keep a strong reference so the id() logged in self._objs cannot be
        # reused by a garbage-collected object.
        self._seen.append(obj)

        max_reached = self._max_reached()

        try:
            in_cycle = _in_cycle(obj, self._objs, max_reached, self.make_refs)
            if in_cycle:
                # break the cycle
                flatten_func = repr
            else:
                flatten_func = self._get_flattener(obj)  # type: ignore[assignment]

            if flatten_func is None:
                # Nothing can flatten this object; warn and replace with None.
                self._pickle_warning(obj)
                return None

            return flatten_func(obj)

        except (KeyboardInterrupt, SystemExit) as e:
            # Never swallow interpreter-exit signals, even with fail_safe set.
            raise e
        except Exception as e:
            if self.fail_safe is None:
                raise e
            else:
                # fail_safe consumes the exception and supplies the value used
                # in place of the object that caused the error.
                return self.fail_safe(e)

426 

427 def _list_recurse(self, obj: Iterable[Any]) -> List[Any]: 

428 return [self._flatten(v) for v in obj] 

429 

430 def _flatten_function(self, obj: Callable[..., Any]) -> Optional[Dict[str, str]]: 

431 if self.unpicklable: 

432 data = {tags.FUNCTION: util.importable_name(obj)} 

433 else: 

434 data = None 

435 

436 return data 

437 

438 def _getstate(self, obj: Any, data: Dict[str, Any]) -> Dict[str, Any]: 

439 state = self._flatten(obj) 

440 if self.unpicklable: 

441 data[tags.STATE] = state 

442 else: 

443 data = state 

444 return data 

445 

    def _flatten_key_value_pair(
        self, k: Any, v: Any, data: Dict[Union[str, Any], Any]
    ) -> Dict[Union[str, Any], Any]:
        """Flatten a key/value pair into the passed-in dictionary."""
        if not util._is_picklable(k, v):
            return data
        # TODO: use inspect.getmembers_static on 3.11+ because it avoids dynamic
        # attribute lookups
        # Skip attributes of the original object that are read-only; emitting
        # them would break decoding since they cannot be restored.
        if (
            self.handle_readonly
            and k in {attr for attr, val in inspect.getmembers(self._original_object)}
            and util._is_readonly(self._original_object, k, v)
        ):
            return data

        if k is None:
            k = "null"  # for compatibility with common json encoders

        # Keys must be json-friendly: numeric keys pass through when allowed,
        # any other non-string is coerced via repr() (str() as fallback).
        if self.numeric_keys and isinstance(k, (int, float)):
            pass
        elif not isinstance(k, str):
            try:
                k = repr(k)
            except Exception:
                k = str(k)

        data[k] = self._flatten(v)
        return data

474 

    def _flatten_obj_attrs(
        self,
        obj: Any,
        attrs: Iterable[str],
        data: Dict[str, Any],
        exclude: Iterable[str] = (),
    ) -> bool:
        """Flatten the named attributes of ``obj`` into ``data``.

        Returns True when at least one attribute was processed, letting the
        caller know whether the slot scan found anything at all.
        """
        flatten = self._flatten_key_value_pair
        ok = False
        exclude = set(exclude)
        for k in attrs:
            if k in exclude:
                continue
            try:
                if not k.startswith("__"):
                    value = getattr(obj, k)
                else:
                    # Double-underscore attributes are name-mangled by Python
                    # to ``_ClassName__attr`` -- look them up that way.
                    value = getattr(obj, f"_{obj.__class__.__name__}{k}")
                flatten(k, value, data)
            except AttributeError:
                # The attribute may have been deleted
                continue
            ok = True
        return ok

499 

500 def _flatten_properties( 

501 self, 

502 obj: Any, 

503 data: Dict[str, Any], 

504 allslots: Optional[Iterable[Sequence[str]]] = None, 

505 ) -> Dict[str, Any]: 

506 if allslots is None: 

507 # setting a list as a default argument can lead to some weird errors 

508 allslots = [] 

509 

510 # convert to set in case there are a lot of slots 

511 allslots_set = set(itertools.chain.from_iterable(allslots)) 

512 

513 # i don't like lambdas 

514 def valid_property(x: tuple[str, Any]) -> bool: 

515 return not x[0].startswith("__") and x[0] not in allslots_set 

516 

517 properties = [ 

518 x[0] for x in inspect.getmembers(obj.__class__) if valid_property(x) 

519 ] 

520 

521 properties_dict = {} 

522 for p_name in properties: 

523 p_val = getattr(obj, p_name) 

524 if util._is_not_class(p_val): 

525 properties_dict[p_name] = p_val 

526 else: 

527 properties_dict[p_name] = self._flatten(p_val) 

528 

529 data[tags.PROPERTY] = properties_dict 

530 

531 return data 

532 

    def _flatten_newstyle_with_slots(
        self,
        obj: Any,
        data: Dict[str, Any],
        exclude: Iterable[str] = (),
    ) -> Dict[str, Any]:
        """Return a json-friendly dict for new-style objects with __slots__."""
        # Gather __slots__ across the whole MRO; bare-string slots are
        # normalized to one-element tuples.
        allslots = [
            _wrap_string_slot(getattr(cls, "__slots__", tuple()))
            for cls in obj.__class__.mro()
        ]

        # add properties to the attribute list
        if self.include_properties:
            data = self._flatten_properties(obj, data, allslots)

        # If no slot attribute could be flattened, fall back to scanning
        # dir(obj) for plain (non-dunder) attribute names.
        if not self._flatten_obj_attrs(obj, chain(*allslots), data, exclude):
            attrs = [
                x for x in dir(obj) if not x.startswith("__") and not x.endswith("__")
            ]
            self._flatten_obj_attrs(obj, attrs, data, exclude)

        return data

556 

    def _flatten_obj_instance(
        self, obj: Any
    ) -> Optional[Union[Dict[str, Any], List[Any], Any]]:
        """Recursively flatten an instance and return a json-friendly dict"""
        # we're generally not bothering to annotate parts that aren't part of the public API
        # but this annotation alone saves us 3 mypy "errors"
        data: Dict[str, Any] = {}
        has_class = hasattr(obj, "__class__")
        has_dict = hasattr(obj, "__dict__")
        has_slots = not has_dict and hasattr(obj, "__slots__")
        has_getnewargs = util.has_method(obj, "__getnewargs__")
        has_getnewargs_ex = util.has_method(obj, "__getnewargs_ex__")
        has_getinitargs = util.has_method(obj, "__getinitargs__")
        has_reduce, has_reduce_ex = util.has_reduce(obj)
        # Attribute names the object asked us to skip entirely.
        exclude = set(getattr(obj, "_jsonpickle_exclude", ()))

        # Support objects with __getstate__(); this ensures that
        # both __setstate__() and __getstate__() are implemented
        has_own_getstate = hasattr(type(obj), "__getstate__") and type(
            obj
        ).__getstate__ is not getattr(object, "__getstate__", None)
        # not using has_method since __getstate__() is handled separately below
        # Note: on Python 3.11+, all objects have __getstate__.

        if has_class:
            cls = obj.__class__
        else:
            cls = type(obj)

        # Check for a custom handler
        class_name = util.importable_name(cls)
        handler = handlers.get(cls, handlers.get(class_name))  # type: ignore[arg-type]
        if handler is not None:
            if self.unpicklable:
                data[tags.OBJECT] = class_name
            result = handler(self).flatten(obj, data)
            if result is None:
                self._pickle_warning(obj)
            return result

        reduce_val = None

        if self.include_properties:
            data = self._flatten_properties(obj, data)

        if self.unpicklable:
            if has_reduce and not has_reduce_ex:
                try:
                    reduce_val = obj.__reduce__()
                except TypeError:
                    # A lot of builtin types have a reduce which
                    # just raises a TypeError
                    # we ignore those
                    pass

            # test for a reduce implementation, and redirect before
            # doing anything else if that is what reduce requests
            elif has_reduce_ex:
                try:
                    # we're implementing protocol 2
                    reduce_val = obj.__reduce_ex__(2)
                except TypeError:
                    # A lot of builtin types have a reduce which
                    # just raises a TypeError
                    # we ignore those
                    pass

            if reduce_val and isinstance(reduce_val, str):
                # A string from __reduce__ names a module-global to pickle
                # in place of the object.
                try:
                    varpath = iter(reduce_val.split("."))
                    # curmod will be transformed by the
                    # loop into the value to pickle
                    curmod = sys.modules[next(varpath)]
                    for modname in varpath:
                        curmod = getattr(curmod, modname)
                    # replace obj with value retrieved
                    return self._flatten(curmod)
                except KeyError:
                    # well, we can't do anything with that, so we ignore it
                    pass

            elif reduce_val:
                # at this point, reduce_val should be some kind of iterable
                # pad out to len 5
                rv_as_list = list(reduce_val)
                insufficiency = 5 - len(rv_as_list)
                if insufficiency:
                    rv_as_list += [None] * insufficiency

                if getattr(rv_as_list[0], "__name__", "") == "__newobj__":
                    rv_as_list[0] = tags.NEWOBJ

                f, args, state, listitems, dictitems = rv_as_list

                # check that getstate/setstate is sane
                if not (
                    state
                    and has_own_getstate
                    and not hasattr(obj, "__setstate__")
                    and not isinstance(obj, dict)
                ):
                    # turn iterators to iterables for convenient serialization
                    if rv_as_list[3]:
                        rv_as_list[3] = tuple(rv_as_list[3])

                    if rv_as_list[4]:
                        rv_as_list[4] = tuple(rv_as_list[4])

                    reduce_args = list(map(self._flatten, rv_as_list))
                    # Trim trailing None entries (the callable and its args
                    # at indices 0-1 are always kept).
                    last_index = len(reduce_args) - 1
                    while last_index >= 2 and reduce_args[last_index] is None:
                        last_index -= 1
                    data[tags.REDUCE] = reduce_args[: last_index + 1]

                    return data

        if has_class and not isinstance(obj, types.ModuleType):
            if self.unpicklable:
                data[tags.OBJECT] = class_name

            if has_getnewargs_ex:
                data[tags.NEWARGSEX] = [
                    self._flatten(arg) for arg in obj.__getnewargs_ex__()
                ]

            if has_getnewargs and not has_getnewargs_ex:
                data[tags.NEWARGS] = self._flatten(obj.__getnewargs__())

            if has_getinitargs:
                data[tags.INITARGS] = self._flatten(obj.__getinitargs__())

        if has_own_getstate:
            try:
                state = obj.__getstate__()
            except TypeError:
                # Has getstate but it cannot be called, e.g. file descriptors
                # in Python3
                self._pickle_warning(obj)
                return None
            else:
                if exclude and isinstance(state, dict):
                    state = {k: v for k, v in util.items(state, exclude=exclude)}
                if state:
                    return self._getstate(state, data)

        if isinstance(obj, types.ModuleType):
            if self.unpicklable:
                data[tags.MODULE] = "{name}/{name}".format(name=obj.__name__)
            else:
                # TODO: this causes a mypy assignment error, figure out
                # if it's actually an error or a false alarm
                data = str(obj)  # type: ignore[assignment]
            return data

        if util._is_dictionary_subclass(obj):
            self._flatten_dict_obj(obj, data, exclude=exclude)
            return data

        if util._is_sequence_subclass(obj):
            return self._flatten_sequence_obj(obj, data)

        if util._is_iterator(obj):
            # force list in python 3
            data[tags.ITERATOR] = list(map(self._flatten, islice(obj, self._max_iter)))
            return data

        if has_dict:
            # Support objects that subclasses list and set
            if util._is_sequence_subclass(obj):
                return self._flatten_sequence_obj(obj, data)

            # hack for zope persistent objects; this unghostifies the object
            getattr(obj, "_", None)
            return self._flatten_dict_obj(obj.__dict__, data, exclude=exclude)

        if has_slots:
            return self._flatten_newstyle_with_slots(obj, data, exclude=exclude)

        # catchall return for data created above without a return
        # (e.g. __getnewargs__ is not supposed to be the end of the story)
        if data:
            return data

        self._pickle_warning(obj)
        return None

742 

743 def _ref_obj_instance(self, obj: Any) -> Optional[Union[Dict[str, Any], List[Any]]]: 

744 """Reference an existing object or flatten if new""" 

745 if self.unpicklable: 

746 if self._mkref(obj): 

747 # We've never seen this object so return its 

748 # json representation. 

749 return self._flatten_obj_instance(obj) 

750 # We've seen this object before so place an object 

751 # reference tag in the data. This avoids infinite recursion 

752 # when processing cyclical objects. 

753 return self._getref(obj) 

754 else: 

755 max_reached = self._max_reached() 

756 in_cycle = _in_cycle(obj, self._objs, max_reached, False) 

757 if in_cycle: 

758 # A circular becomes None. 

759 return None 

760 

761 self._mkref(obj) 

762 return self._flatten_obj_instance(obj) 

763 

764 def _escape_key(self, k: Any) -> str: 

765 return tags.JSON_KEY + encode( 

766 k, 

767 reset=False, 

768 keys=True, 

769 context=self, 

770 backend=self.backend, 

771 make_refs=self.make_refs, 

772 ) 

773 

774 def _flatten_non_string_key_value_pair( 

775 self, k: Any, v: Any, data: Dict[str, Any] 

776 ) -> Dict[str, Any]: 

777 """Flatten only non-string key/value pairs""" 

778 if not util._is_picklable(k, v): 

779 return data 

780 if self.keys and not isinstance(k, str): 

781 k = self._escape_key(k) 

782 data[k] = self._flatten(v) 

783 return data 

784 

    def _flatten_string_key_value_pair(
        self, k: str, v: Any, data: Dict[str, Any]
    ) -> Dict[str, Any]:
        """Flatten string key/value pairs only."""
        if not util._is_picklable(k, v):
            return data
        if self.keys:
            if not isinstance(k, str):
                # Non-string keys are left for the second encoding phase
                # (_flatten_non_string_key_value_pair).
                return data
            elif k.startswith(tags.JSON_KEY):
                # A literal string colliding with the json:// prefix must be
                # escaped itself so decoding stays unambiguous.
                k = self._escape_key(k)
        else:
            if k is None:
                k = "null"  # for compatibility with common json encoders

            # Same key coercion as _flatten_key_value_pair: keep numerics
            # when allowed, otherwise repr() (str() as fallback).
            if self.numeric_keys and isinstance(k, (int, float)):
                pass
            elif not isinstance(k, str):
                try:
                    k = repr(k)
                except Exception:
                    k = str(k)

        data[k] = self._flatten(v)
        return data

810 

    def _flatten_dict_obj(
        self,
        obj: dict[Any, Any],
        data: Optional[Dict[Any, Any]] = None,
        exclude: Iterable[Any] = (),
    ) -> Dict[str, Any]:
        """Recursively call flatten() and return json-friendly dict"""
        if data is None:
            # Instantiate the same mapping class so dict subclasses keep
            # their type in the output.
            data = obj.__class__()

        # If we allow non-string keys then we have to do a two-phase
        # encoding to ensure that the reference IDs are deterministic.
        if self.keys:
            # Phase 1: serialize regular objects, ignore fancy keys.
            flatten = self._flatten_string_key_value_pair
            for k, v in util.items(obj, exclude=exclude):
                flatten(k, v, data)

            # Phase 2: serialize non-string keys.
            flatten = self._flatten_non_string_key_value_pair
            for k, v in util.items(obj, exclude=exclude):
                flatten(k, v, data)
        else:
            # If we have string keys only then we only need a single pass.
            flatten = self._flatten_key_value_pair
            for k, v in util.items(obj, exclude=exclude):
                flatten(k, v, data)

        # the collections.defaultdict protocol
        if hasattr(obj, "default_factory") and callable(obj.default_factory):
            factory = obj.default_factory
            # i know that this string could be moved above the hasattr to reduce
            # string duplication but mypy 1.18.2 complains and i don't want to use
            # even more type: ignores
            store_key = "default_factory"
            # Avoid clobbering a real "default_factory" dict entry.
            if store_key in data:
                store_key = tags.DEFAULT_FACTORY
            if util._is_type(factory):
                # Reference the class/type
                # in this case it's Dict[str, str]
                value: Dict[str, str] = _mktyperef(factory)
            else:
                # The factory is not a type and could reference e.g. functions
                # or even the object instance itself, which creates a cycle.
                if self._mkref(factory):
                    # We've never seen this object before so pickle it in-place.
                    # Create an instance from the factory and assume that the
                    # resulting instance is a suitable exemplar.
                    value: Dict[str, Any] = self._flatten_obj_instance(handlers.CloneFactory(factory()))  # type: ignore[no-redef]
                else:
                    # We've seen this object before.
                    # Break the cycle by emitting a reference.
                    # in this case it's Dict[str, int]
                    value: Dict[str, int] = self._getref(factory)  # type: ignore[no-redef]
            data[store_key] = value

        # Sub-classes of dict
        if hasattr(obj, "__dict__") and self.unpicklable and obj != obj.__dict__:
            if self._mkref(obj.__dict__):
                dict_data = {}
                self._flatten_dict_obj(obj.__dict__, dict_data, exclude=exclude)
                data["__dict__"] = dict_data
            else:
                data["__dict__"] = self._getref(obj.__dict__)

        return data

877 

    def _get_flattener(self, obj: Any) -> Optional[Callable[[Any], Any]]:
        """Pick the flattening routine for ``obj``; None when unsupported.

        Exact-type checks are deliberate: subclasses of list/dict/tuple/set
        fall through to the generic object machinery instead.
        """
        if type(obj) in (list, dict):
            if self._mkref(obj):
                return (
                    self._list_recurse if type(obj) is list else self._flatten_dict_obj
                )
            else:
                return self._getref

        # We handle tuples and sets by encoding them in a "(tuple|set)dict"
        elif type(obj) in (tuple, set):
            if not self.unpicklable:
                return self._list_recurse
            # NOTE: the lambda parameter intentionally shadows the outer
            # ``obj``; the returned flattener is called with the same object.
            return lambda obj: {
                tags.TUPLE if type(obj) is tuple else tags.SET: [
                    self._flatten(v) for v in obj
                ]
            }

        elif util._is_module_function(obj):
            return self._flatten_function

        elif util._is_object(obj):
            return self._ref_obj_instance

        elif util._is_type(obj):
            return _mktyperef

        # instance methods, lambdas, old style classes...
        self._pickle_warning(obj)
        return None

909 

910 def _flatten_sequence_obj( 

911 self, obj: Iterable[Any], data: Dict[str, Any] 

912 ) -> Union[Dict[str, Any], List[Any]]: 

913 """Return a json-friendly dict for a sequence subclass.""" 

914 if hasattr(obj, "__dict__"): 

915 self._flatten_dict_obj(obj.__dict__, data) 

916 value = [self._flatten(v) for v in obj] 

917 if self.unpicklable: 

918 data[tags.SEQ] = value 

919 else: 

920 return value 

921 return data