Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/jsonpickle/pickler.py: 12%

397 statements  

1# Copyright (C) 2008 John Paulett (john -at- paulett.org) 

2# Copyright (C) 2009-2024 David Aguilar (davvid -at- gmail.com) 

3# All rights reserved. 

4# 

5# This software is licensed as described in the file COPYING, which 

6# you should have received as part of this distribution. 

7import decimal 

8import inspect 

9import itertools 

10import sys 

11import types 

12import warnings 

13from itertools import chain, islice 

14from typing import Any, Callable, Dict, Iterable, List, Optional, Sequence, Type, Union 

15 

16from . import handlers, tags, util 

17from .backend import JSONBackend, json 

18 

19 

20def encode( 

21 value: Any, 

22 unpicklable: bool = True, 

23 make_refs: bool = True, 

24 keys: bool = False, 

25 max_depth: Optional[int] = None, 

26 reset: bool = True, 

27 backend: Optional[JSONBackend] = None, 

28 warn: bool = False, 

29 context: Optional["Pickler"] = None, 

30 max_iter: Optional[int] = None, 

31 use_decimal: bool = False, 

32 numeric_keys: bool = False, 

33 use_base85: bool = False, 

34 fail_safe: Optional[Callable[[Exception], Any]] = None, 

35 indent: Optional[int] = None, 

36 separators: Optional[Any] = None, 

37 include_properties: bool = False, 

38 handle_readonly: bool = False, 

39) -> str: 

40 """Return a JSON formatted representation of value, a Python object. 

41 

42 :param unpicklable: If set to ``False`` then the output will not contain the 

43 information necessary to turn the JSON data back into Python objects, 

44 but a simpler JSON stream is produced. It's recommended to set this 

45 parameter to ``False`` when your code does not rely on two objects 

46 having the same ``id()`` value, and when it is sufficient for those two 

47 objects to be equal by ``==``, such as when serializing sklearn 

48 instances. If you experience (de)serialization being incorrect when you 

49 use numpy, pandas, or sklearn handlers, this should be set to ``False``. 

50 If you want the output to not include the dtype for numpy arrays, add:: 

51 

52 jsonpickle.register( 

53 numpy.generic, UnpicklableNumpyGenericHandler, base=True 

54 ) 

55 

56 before your pickling code. 

57 :param make_refs: If set to False jsonpickle's referencing support is 

58 disabled. Objects that are id()-identical won't be preserved across 

59 encode()/decode(), but the resulting JSON stream will be conceptually 

60 simpler. jsonpickle detects cyclical objects and will break the cycle 

61 by calling repr() instead of recursing when make_refs is set to False. 

62 :param keys: If set to True then jsonpickle will encode non-string 

63 dictionary keys instead of coercing them into strings via `repr()`. 

64 This is typically what you want if you need to support integers or 

65 arbitrary objects as dictionary keys. 

66 :param max_depth: If set to a non-negative integer then jsonpickle will 

67 not recurse deeper than 'max_depth' steps into the object. Anything 

68 deeper than 'max_depth' is represented using a Python repr() of the 

69 object. 

70 :param reset: Custom pickle handlers that use the `Pickler.flatten` method or 

71 `jsonpickle.encode` function must call `encode` with `reset=False` 

72 in order to retain object references during pickling. 

73 This flag is not typically used outside of a custom handler or 

74 `__getstate__` implementation. 

75 :param backend: If set to an instance of jsonpickle.backend.JSONBackend, 

76 jsonpickle will use that backend for serialization. 

77 :param warn: If set to True then jsonpickle will warn when it 

78 returns None for an object which it cannot pickle 

79 (e.g. file descriptors). 

80 :param context: Supply a pre-built Pickler or Unpickler object to the 

81 `jsonpickle.encode` and `jsonpickle.decode` machinery instead 

82 of creating a new instance. The `context` represents the currently 

83 active Pickler and Unpickler objects when custom handlers are 

84 invoked by jsonpickle. 

85 :param max_iter: If set to a non-negative integer then jsonpickle will 

86 consume at most `max_iter` items when pickling iterators. 

87 :param use_decimal: If set to True jsonpickle will allow Decimal 

88 instances to pass-through, with the assumption that the simplejson 

89 backend will be used in `use_decimal` mode. In order to use this mode 

90 you will need to configure simplejson:: 

91 

92 jsonpickle.set_encoder_options('simplejson', 

93 use_decimal=True, sort_keys=True) 

94 jsonpickle.set_decoder_options('simplejson', 

95 use_decimal=True) 

96 jsonpickle.set_preferred_backend('simplejson') 

97 

98 NOTE: A side-effect of the above settings is that float values will be 

99 converted to Decimal when converting to json. 

100 :param numeric_keys: Only use this option if the backend supports integer 

101 dict keys natively. This flag tells jsonpickle to leave numeric keys 

102 as-is rather than conforming them to json-friendly strings. 

103 Using ``keys=True`` is the typical solution for integer keys, so only 

104 use this if you have a specific use case where you want to allow the 

105 backend to handle serialization of numeric dict keys. 

106 :param use_base85: 

107 If possible, use base85 to encode binary data. Base85 bloats binary data 

108 by 1/4 as opposed to base64, which expands it by 1/3. This argument is 

109 ignored on Python 2, which does not support base85. 

110 :param fail_safe: If set to a function, exceptions are ignored during 

111 pickling; if an exception occurs, the function is called and its return 

112 value is used for the object that caused the error. 

113 :param indent: When `indent` is a non-negative integer, then JSON array 

114 elements and object members will be pretty-printed with that indent 

115 level. An indent level of 0 will only insert newlines. ``None`` is 

116 the most compact representation. Since the default item separator is 

117 ``(', ', ': ')``, the output might include trailing whitespace when 

118 ``indent`` is specified. You can use ``separators=(',', ': ')`` to 

119 avoid this. This value is passed directly to the active JSON backend 

120 library and not used by jsonpickle directly. 

121 :param separators: 

122 If ``separators`` is an ``(item_separator, dict_separator)`` tuple 

123 then it will be used instead of the default ``(', ', ': ')`` 

124 separators. ``(',', ':')`` is the most compact JSON representation. 

125 This value is passed directly to the active JSON backend library and 

126 not used by jsonpickle directly. 

127 :param include_properties: 

128 Include the names and values of class properties in the generated json. 

129 Properties are unpickled properly regardless of this setting; this is 

130 meant to be used if processing the json outside of Python. Certain types 

131 such as sets will not pickle due to not having a native-json equivalent. 

132 Defaults to ``False``. 

133 :param handle_readonly: 

134 Handle objects with readonly methods, such as Django's SafeString. This 

135 basically prevents jsonpickle from raising an exception for such objects. 

136 You MUST set ``handle_readonly=True`` for the decoding if you encode with 

137 this flag set to ``True``. 

138 

139 >>> encode('my string') == '"my string"' 

140 True 

141 >>> encode(36) == '36' 

142 True 

143 >>> encode({'foo': True}) == '{"foo": true}' 

144 True 

145 >>> encode({'foo': [1, 2, [3, 4]]}, max_depth=1) 

146 '{"foo": "[1, 2, [3, 4]]"}' 

147 

148 """ 

149 backend = backend or json 

150 context = context or Pickler( 

151 unpicklable=unpicklable, 

152 make_refs=make_refs, 

153 keys=keys, 

154 backend=backend, 

155 max_depth=max_depth, 

156 warn=warn, 

157 max_iter=max_iter, 

158 numeric_keys=numeric_keys, 

159 use_decimal=use_decimal, 

160 use_base85=use_base85, 

161 fail_safe=fail_safe, 

162 include_properties=include_properties, 

163 handle_readonly=handle_readonly, 

164 original_object=value, 

165 ) 

166 return backend.encode( 

167 context.flatten(value, reset=reset), indent=indent, separators=separators 

168 ) 
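
# A minimal usage sketch of the encode() API documented above, assuming the
# public jsonpickle package is importable; Thing is a hypothetical class and
# the emitted strings are approximate.
import jsonpickle

class Thing:
    def __init__(self, name):
        self.name = name

jsonpickle.encode(Thing('demo'), unpicklable=False)
# -> '{"name": "demo"}'  (roughly; no py/object tag, so it cannot be restored)
jsonpickle.encode(Thing('demo'))
# -> '{"py/object": "__main__.Thing", "name": "demo"}'  (roughly)
jsonpickle.encode({1: 'one'}, keys=True)
# keeps the integer key recoverable instead of coercing it via repr()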

169 

170 

171def _in_cycle( 

172 obj: Any, objs: Dict[int, int], max_reached: bool, make_refs: bool 

173) -> bool: 

174 """Detect cyclic structures that would lead to infinite recursion""" 

175 return ( 

176 (max_reached or (not make_refs and id(obj) in objs)) 

177 and not util._is_primitive(obj) 

178 and not util._is_enum(obj) 

179 ) 

180 

181 

182def _mktyperef(obj: Type[Any]) -> Dict[str, str]: 

183 """Return a typeref dictionary 

184 

185 >>> _mktyperef(AssertionError) == {'py/type': 'builtins.AssertionError'} 

186 True 

187 

188 """ 

189 return {tags.TYPE: util.importable_name(obj)} 

190 

191 

192def _wrap_string_slot(string: Union[str, Sequence[str]]) -> Sequence[str]: 

193 """Converts __slots__ = 'a' into __slots__ = ('a',)""" 

194 if isinstance(string, str): 

195 return (string,) 

196 return string 

197 

198 
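
# A hedged sketch of the reset=False contract described in the encode()
# docstring: a custom handler reuses the active Pickler via self.context so
# object references are preserved. It relies on the handlers module imported
# at the top of this file; Point is a hypothetical example class.
class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y

class PointHandler(handlers.BaseHandler):
    def flatten(self, obj, data):
        # reset=False keeps the active pickler's reference table intact
        data['xy'] = [self.context.flatten(v, reset=False) for v in (obj.x, obj.y)]
        return data

    def restore(self, data):
        return Point(*data['xy'])

handlers.register(Point, PointHandler)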

199class Pickler: 

200 def __init__( 

201 self, 

202 unpicklable: bool = True, 

203 make_refs: bool = True, 

204 max_depth: Optional[int] = None, 

205 backend: Optional[JSONBackend] = None, 

206 keys: bool = False, 

207 warn: bool = False, 

208 max_iter: Optional[int] = None, 

209 numeric_keys: bool = False, 

210 use_decimal: bool = False, 

211 use_base85: bool = False, 

212 fail_safe: Optional[Callable[[Exception], Any]] = None, 

213 include_properties: bool = False, 

214 handle_readonly: bool = False, 

215 original_object: Optional[Any] = None, 

216 ) -> None: 

217 self.unpicklable = unpicklable 

218 self.make_refs = make_refs 

219 self.backend = backend or json 

220 self.keys = keys 

221 self.warn = warn 

222 self.numeric_keys = numeric_keys 

223 self.use_base85 = use_base85 

224 # The current recursion depth 

225 self._depth = -1 

226 # The maximal recursion depth 

227 self._max_depth = max_depth 

228 # Maps id(obj) to reference IDs 

229 self._objs = {} 

230 # Avoids garbage collection 

231 self._seen = [] 

232 # maximum number of items to take from a pickled iterator 

233 self._max_iter = max_iter 

234 # Whether to allow decimals to pass through 

235 self._use_decimal = use_decimal 

236 # A cache of objects that have already been flattened. 

237 self._flattened = {} 

238 # Used for util._is_readonly, see +483 

239 self.handle_readonly = handle_readonly 

240 

241 if self.use_base85: 

242 self._bytes_tag = tags.B85 

243 self._bytes_encoder = util.b85encode 

244 else: 

245 self._bytes_tag = tags.B64 

246 self._bytes_encoder = util.b64encode 

247 

248 # ignore exceptions 

249 self.fail_safe = fail_safe 

250 self.include_properties = include_properties 

251 

252 self._original_object = original_object 

253 

254 def _determine_sort_keys(self) -> bool: 

255 for _, options in getattr(self.backend, "_encoder_options", {}).values(): 

256 if options.get("sort_keys", False): 

257 # the user has set one of the backends to sort keys 

258 return True 

259 return False 

260 

261 def _sort_attrs(self, obj: Any) -> Any: 

262 if hasattr(obj, "__slots__") and self.warn: 

263 # Slots are read-only by default, the only way 

264 # to sort keys is to do it in a subclass 

265 # and that would require calling the init function 

266 # of the parent again. That could cause issues 

267 # so we refuse to handle it. 

268 raise TypeError( 

269 "Objects with __slots__ cannot have their keys reliably sorted by " 

270 "jsonpickle! Please sort the keys in the __slots__ definition instead." 

271 ) 

272 # Some classes have neither __slots__ nor __dict__, hence the hasattr check 

273 elif hasattr(obj, "__dict__"): 

274 try: 

275 obj.__dict__ = dict(sorted(obj.__dict__.items())) 

276 except (TypeError, AttributeError): 

277 # Can't set attributes of builtin/extension type 

278 pass 

279 return obj 

280 

281 def reset(self) -> None: 

282 self._objs = {} 

283 self._depth = -1 

284 self._seen = [] 

285 self._flattened = {} 

286 

287 def _push(self) -> None: 

288 """Steps down one level in the namespace.""" 

289 self._depth += 1 

290 

291 def _pop(self, value: Any) -> Any: 

292 """Step up one level in the namespace and return the value. 

293 If we're at the root, reset the pickler's state. 

294 """ 

295 self._depth -= 1 

296 if self._depth == -1: 

297 self.reset() 

298 return value 

299 

300 def _log_ref(self, obj: Any) -> bool: 

301 """ 

302 Log a reference to an in-memory object. 

303 Return True if this object is new and was assigned 

304 a new ID. Otherwise return False. 

305 """ 

306 objid = id(obj) 

307 is_new = objid not in self._objs 

308 if is_new: 

309 new_id = len(self._objs) 

310 self._objs[objid] = new_id 

311 return is_new 

312 

313 def _mkref(self, obj: Any) -> bool: 

314 """ 

315 Log a reference to an in-memory object, and return 

316 if that object should be considered newly logged. 

317 """ 

318 is_new = self._log_ref(obj) 

319 # Pretend the object is new 

320 pretend_new = not self.unpicklable or not self.make_refs 

321 return pretend_new or is_new 

322 

323 def _getref(self, obj: Any) -> Dict[str, int]: 

324 """Return a "py/id" entry for the specified object""" 

325 return {tags.ID: self._objs.get(id(obj))} # type: ignore[dict-item] 

326 

327 def _flatten(self, obj: Any) -> Any: 

328 """Flatten an object and its guts into a json-safe representation""" 

329 if self.unpicklable and self.make_refs: 

330 result = self._flatten_impl(obj) 

331 else: 

332 try: 

333 result = self._flattened[id(obj)] 

334 except KeyError: 

335 result = self._flattened[id(obj)] = self._flatten_impl(obj) 

336 return result 

337 

338 def flatten(self, obj: Any, reset: bool = True) -> Any: 

339 """Takes an object and returns a JSON-safe representation of it. 

340 

341 Simply returns any of the basic builtin datatypes 

342 

343 >>> p = Pickler() 

344 >>> p.flatten('hello world') == 'hello world' 

345 True 

346 >>> p.flatten(49) 

347 49 

348 >>> p.flatten(350.0) 

349 350.0 

350 >>> p.flatten(True) 

351 True 

352 >>> p.flatten(False) 

353 False 

354 >>> r = p.flatten(None) 

355 >>> r is None 

356 True 

357 >>> p.flatten(False) 

358 False 

359 >>> p.flatten([1, 2, 3, 4]) 

360 [1, 2, 3, 4] 

361 >>> p.flatten((1,2,))[tags.TUPLE] 

362 [1, 2] 

363 >>> p.flatten({'key': 'value'}) == {'key': 'value'} 

364 True 

365 """ 

366 if reset: 

367 self.reset() 

368 if self._determine_sort_keys(): 

369 obj = self._sort_attrs(obj) 

370 return self._flatten(obj) 
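
# A small sketch of reference emission through flatten(): the first time a
# container is seen it is flattened in place, while later occurrences become
# a py/id reference via _getref(); the integer id shown is illustrative.
from jsonpickle.pickler import Pickler

p = Pickler()
shared = ['x']
p.flatten({'a': shared, 'b': shared})
# -> {'a': ['x'], 'b': {'py/id': 1}}  (roughly)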

371 

372 def _flatten_bytestring(self, obj: bytes) -> Dict[str, str]: 

373 return {self._bytes_tag: self._bytes_encoder(obj)} 
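
# Bytes handling sketch: by default bytes are wrapped in a py/b64 entry, and
# with use_base85=True in a py/b85 entry, using the tag and encoder selected
# in __init__; payloads are elided here.
from jsonpickle.pickler import Pickler

Pickler().flatten(b'\x00\x01')                 # -> {'py/b64': '...'}
Pickler(use_base85=True).flatten(b'\x00\x01')  # -> {'py/b85': '...'}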

374 

375 def _flatten_impl(self, obj: Any) -> Any: 

376 ######################################### 

377 # if obj is nonrecursive return immediately 

378 # for performance reasons we don't want to do recursive checks 

379 if type(obj) is bytes: 

380 return self._flatten_bytestring(obj) 

381 

382 # Decimal is a primitive when use_decimal is True 

383 if type(obj) in (str, bool, int, float, type(None)) or ( 

384 self._use_decimal and isinstance(obj, decimal.Decimal) 

385 ): 

386 return obj 

387 ######################################### 

388 

389 self._push() 

390 return self._pop(self._flatten_obj(obj)) 

391 

392 def _max_reached(self) -> bool: 

393 return self._depth == self._max_depth 

394 

395 def _pickle_warning(self, obj: Any) -> None: 

396 if self.warn: 

397 msg = "jsonpickle cannot pickle %r: replaced with None" % obj 

398 warnings.warn(msg) 

399 

400 def _flatten_obj(self, obj: Any) -> Any: 

401 self._seen.append(obj) 

402 

403 max_reached = self._max_reached() 

404 

405 try: 

406 in_cycle = _in_cycle(obj, self._objs, max_reached, self.make_refs) 

407 if in_cycle: 

408 # break the cycle 

409 flatten_func = repr 

410 else: 

411 flatten_func = self._get_flattener(obj) # type: ignore[assignment] 

412 

413 if flatten_func is None: 

414 self._pickle_warning(obj) 

415 return None 

416 

417 return flatten_func(obj) 

418 

419 except (KeyboardInterrupt, SystemExit) as e: 

420 raise e 

421 except Exception as e: 

422 if self.fail_safe is None: 

423 raise e 

424 else: 

425 return self.fail_safe(e) 
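
# A hedged sketch of the fail_safe hook handled above: any exception raised
# while flattening is passed to fail_safe and its return value stands in for
# the failing object. Broken is a hypothetical class.
import jsonpickle

class Broken:
    def __getstate__(self):
        raise ValueError('cannot pickle this')

jsonpickle.encode(Broken(), fail_safe=lambda exc: None)
# -> 'null'  (roughly; without fail_safe the ValueError would propagate)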

426 

427 def _list_recurse(self, obj: Iterable[Any]) -> List[Any]: 

428 return [self._flatten(v) for v in obj] 

429 

430 def _flatten_function(self, obj: Callable[..., Any]) -> Optional[Dict[str, str]]: 

431 if self.unpicklable: 

432 data = {tags.FUNCTION: util.importable_name(obj)} 

433 else: 

434 data = None 

435 

436 return data 

437 

438 def _getstate(self, obj: Any, data: Dict[str, Any]) -> Dict[str, Any]: 

439 state = self._flatten(obj) 

440 if self.unpicklable: 

441 data[tags.STATE] = state 

442 else: 

443 data = state 

444 return data 

445 

446 def _flatten_key_value_pair( 

447 self, k: Any, v: Any, data: Dict[Union[str, Any], Any] 

448 ) -> Dict[Union[str, Any], Any]: 

449 """Flatten a key/value pair into the passed-in dictionary.""" 

450 if not util._is_picklable(k, v): 

451 return data 

452 # TODO: use inspect.getmembers_static on 3.11+ because it avoids dynamic 

453 # attribute lookups 

454 if ( 

455 self.handle_readonly 

456 and k in {attr for attr, val in inspect.getmembers(self._original_object)} 

457 and util._is_readonly(self._original_object, k, v) 

458 ): 

459 return data 

460 

461 if k is None: 

462 k = "null" # for compatibility with common json encoders 

463 

464 if self.numeric_keys and isinstance(k, (int, float)): 

465 pass 

466 elif not isinstance(k, str): 

467 try: 

468 k = repr(k) 

469 except Exception: 

470 k = str(k) 

471 

472 data[k] = self._flatten(v) 

473 return data 

474 

475 def _flatten_obj_attrs( 

476 self, obj: Any, attrs: Iterable[str], data: Dict[str, Any] 

477 ) -> bool: 

478 flatten = self._flatten_key_value_pair 

479 ok = False 

480 for k in attrs: 

481 try: 

482 if not k.startswith("__"): 

483 value = getattr(obj, k) 

484 else: 

485 value = getattr(obj, f"_{obj.__class__.__name__}{k}") 

486 flatten(k, value, data) 

487 except AttributeError: 

488 # The attribute may have been deleted 

489 continue 

490 ok = True 

491 return ok 

492 

493 def _flatten_properties( 

494 self, 

495 obj: Any, 

496 data: Dict[str, Any], 

497 allslots: Optional[Iterable[Sequence[str]]] = None, 

498 ) -> Dict[str, Any]: 

499 if allslots is None: 

500 # setting a list as a default argument can lead to some weird errors 

501 allslots = [] 

502 

503 # convert to set in case there are a lot of slots 

504 allslots_set = set(itertools.chain.from_iterable(allslots)) 

505 

506 # i don't like lambdas 

507 def valid_property(x: tuple[str, Any]) -> bool: 

508 return not x[0].startswith("__") and x[0] not in allslots_set 

509 

510 properties = [ 

511 x[0] for x in inspect.getmembers(obj.__class__) if valid_property(x) 

512 ] 

513 

514 properties_dict = {} 

515 for p_name in properties: 

516 p_val = getattr(obj, p_name) 

517 if util._is_not_class(p_val): 

518 properties_dict[p_name] = p_val 

519 else: 

520 properties_dict[p_name] = self._flatten(p_val) 

521 

522 data[tags.PROPERTY] = properties_dict 

523 

524 return data 

525 

526 def _flatten_newstyle_with_slots( 

527 self, obj: Any, data: Dict[str, Any] 

528 ) -> Dict[str, Any]: 

529 """Return a json-friendly dict for new-style objects with __slots__.""" 

530 allslots = [ 

531 _wrap_string_slot(getattr(cls, "__slots__", tuple())) 

532 for cls in obj.__class__.mro() 

533 ] 

534 

535 # add properties to the attribute list 

536 if self.include_properties: 

537 data = self._flatten_properties(obj, data, allslots) 

538 

539 if not self._flatten_obj_attrs(obj, chain(*allslots), data): 

540 attrs = [ 

541 x for x in dir(obj) if not x.startswith("__") and not x.endswith("__") 

542 ] 

543 self._flatten_obj_attrs(obj, attrs, data) 

544 

545 return data 

546 

547 def _flatten_obj_instance( 

548 self, obj: Any 

549 ) -> Optional[Union[Dict[str, Any], List[Any], Any]]: 

550 """Recursively flatten an instance and return a json-friendly dict""" 

551 # we're generally not bothering to annotate parts that aren't part of the public API 

552 # but this annotation alone saves us 3 mypy "errors" 

553 data: Dict[str, Any] = {} 

554 has_class = hasattr(obj, "__class__") 

555 has_dict = hasattr(obj, "__dict__") 

556 has_slots = not has_dict and hasattr(obj, "__slots__") 

557 has_getnewargs = util.has_method(obj, "__getnewargs__") 

558 has_getnewargs_ex = util.has_method(obj, "__getnewargs_ex__") 

559 has_getinitargs = util.has_method(obj, "__getinitargs__") 

560 has_reduce, has_reduce_ex = util.has_reduce(obj) 

561 exclude = set(getattr(obj, "_jsonpickle_exclude", ())) 

562 

563 # Support objects with __getstate__(); this ensures that 

564 # both __setstate__() and __getstate__() are implemented 

565 has_own_getstate = hasattr(type(obj), "__getstate__") and type( 

566 obj 

567 ).__getstate__ is not getattr(object, "__getstate__", None) 

568 # not using has_method since __getstate__() is handled separately below 

569 # Note: on Python 3.11+, all objects have __getstate__. 

570 

571 if has_class: 

572 cls = obj.__class__ 

573 else: 

574 cls = type(obj) 

575 

576 # Check for a custom handler 

577 class_name = util.importable_name(cls) 

578 handler = handlers.get(cls, handlers.get(class_name)) # type: ignore[arg-type] 

579 if handler is not None: 

580 if self.unpicklable: 

581 data[tags.OBJECT] = class_name 

582 result = handler(self).flatten(obj, data) 

583 if result is None: 

584 self._pickle_warning(obj) 

585 return result 

586 

587 reduce_val = None 

588 

589 if self.include_properties: 

590 data = self._flatten_properties(obj, data) 

591 

592 if self.unpicklable: 

593 if has_reduce and not has_reduce_ex: 

594 try: 

595 reduce_val = obj.__reduce__() 

596 except TypeError: 

597 # A lot of builtin types have a reduce which 

598 # just raises a TypeError 

599 # we ignore those 

600 pass 

601 

602 # test for a reduce implementation, and redirect before 

603 # doing anything else if that is what reduce requests 

604 elif has_reduce_ex: 

605 try: 

606 # we're implementing protocol 2 

607 reduce_val = obj.__reduce_ex__(2) 

608 except TypeError: 

609 # A lot of builtin types have a reduce which 

610 # just raises a TypeError 

611 # we ignore those 

612 pass 

613 

614 if reduce_val and isinstance(reduce_val, str): 

615 try: 

616 varpath = iter(reduce_val.split(".")) 

617 # curmod will be transformed by the 

618 # loop into the value to pickle 

619 curmod = sys.modules[next(varpath)] 

620 for modname in varpath: 

621 curmod = getattr(curmod, modname) 

622 # replace obj with value retrieved 

623 return self._flatten(curmod) 

624 except KeyError: 

625 # well, we can't do anything with that, so we ignore it 

626 pass 

627 

628 elif reduce_val: 

629 # at this point, reduce_val should be some kind of iterable 

630 # pad out to len 5 

631 rv_as_list = list(reduce_val) 

632 insufficiency = 5 - len(rv_as_list) 

633 if insufficiency: 

634 rv_as_list += [None] * insufficiency 

635 

636 if getattr(rv_as_list[0], "__name__", "") == "__newobj__": 

637 rv_as_list[0] = tags.NEWOBJ 

638 

639 f, args, state, listitems, dictitems = rv_as_list 

640 

641 # check that getstate/setstate is sane 

642 if not ( 

643 state 

644 and has_own_getstate 

645 and not hasattr(obj, "__setstate__") 

646 and not isinstance(obj, dict) 

647 ): 

648 # turn iterators to iterables for convenient serialization 

649 if rv_as_list[3]: 

650 rv_as_list[3] = tuple(rv_as_list[3]) 

651 

652 if rv_as_list[4]: 

653 rv_as_list[4] = tuple(rv_as_list[4]) 

654 

655 reduce_args = list(map(self._flatten, rv_as_list)) 

656 last_index = len(reduce_args) - 1 

657 while last_index >= 2 and reduce_args[last_index] is None: 

658 last_index -= 1 

659 data[tags.REDUCE] = reduce_args[: last_index + 1] 

660 

661 return data 

662 

663 if has_class and not isinstance(obj, types.ModuleType): 

664 if self.unpicklable: 

665 data[tags.OBJECT] = class_name 

666 

667 if has_getnewargs_ex: 

668 data[tags.NEWARGSEX] = [ 

669 self._flatten(arg) for arg in obj.__getnewargs_ex__() 

670 ] 

671 

672 if has_getnewargs and not has_getnewargs_ex: 

673 data[tags.NEWARGS] = self._flatten(obj.__getnewargs__()) 

674 

675 if has_getinitargs: 

676 data[tags.INITARGS] = self._flatten(obj.__getinitargs__()) 

677 

678 if has_own_getstate: 

679 try: 

680 state = obj.__getstate__() 

681 except TypeError: 

682 # Has getstate but it cannot be called, e.g. file descriptors 

683 # in Python3 

684 self._pickle_warning(obj) 

685 return None 

686 else: 

687 if state: 

688 return self._getstate(state, data) 

689 

690 if isinstance(obj, types.ModuleType): 

691 if self.unpicklable: 

692 data[tags.MODULE] = "{name}/{name}".format(name=obj.__name__) 

693 else: 

694 # TODO: this causes a mypy assignment error, figure out 

695 # if it's actually an error or a false alarm 

696 data = str(obj) # type: ignore[assignment] 

697 return data 

698 

699 if util._is_dictionary_subclass(obj): 

700 self._flatten_dict_obj(obj, data, exclude=exclude) 

701 return data 

702 

703 if util._is_sequence_subclass(obj): 

704 return self._flatten_sequence_obj(obj, data) 

705 

706 if util._is_iterator(obj): 

707 # force list in python 3 

708 data[tags.ITERATOR] = list(map(self._flatten, islice(obj, self._max_iter))) 

709 return data 

710 

711 if has_dict: 

712 # Support objects that subclass list or set 

713 if util._is_sequence_subclass(obj): 

714 return self._flatten_sequence_obj(obj, data) 

715 

716 # hack for zope persistent objects; this unghostifies the object 

717 getattr(obj, "_", None) 

718 return self._flatten_dict_obj(obj.__dict__, data, exclude=exclude) 

719 

720 if has_slots: 

721 return self._flatten_newstyle_with_slots(obj, data) 

722 

723 # catchall return for data created above without a return 

724 # (e.g. __getnewargs__ is not supposed to be the end of the story) 

725 if data: 

726 return data 

727 

728 self._pickle_warning(obj) 

729 return None 

730 

731 def _ref_obj_instance(self, obj: Any) -> Optional[Union[Dict[str, Any], List[Any]]]: 

732 """Reference an existing object or flatten if new""" 

733 if self.unpicklable: 

734 if self._mkref(obj): 

735 # We've never seen this object so return its 

736 # json representation. 

737 return self._flatten_obj_instance(obj) 

738 # We've seen this object before so place an object 

739 # reference tag in the data. This avoids infinite recursion 

740 # when processing cyclical objects. 

741 return self._getref(obj) 

742 else: 

743 max_reached = self._max_reached() 

744 in_cycle = _in_cycle(obj, self._objs, max_reached, False) 

745 if in_cycle: 

746 # A circular reference becomes None. 

747 return None 

748 

749 self._mkref(obj) 

750 return self._flatten_obj_instance(obj) 

751 

752 def _escape_key(self, k: Any) -> str: 

753 return tags.JSON_KEY + encode( 

754 k, 

755 reset=False, 

756 keys=True, 

757 context=self, 

758 backend=self.backend, 

759 make_refs=self.make_refs, 

760 ) 

761 

762 def _flatten_non_string_key_value_pair( 

763 self, k: Any, v: Any, data: Dict[str, Any] 

764 ) -> Dict[str, Any]: 

765 """Flatten only non-string key/value pairs""" 

766 if not util._is_picklable(k, v): 

767 return data 

768 if self.keys and not isinstance(k, str): 

769 k = self._escape_key(k) 

770 data[k] = self._flatten(v) 

771 return data 

772 

773 def _flatten_string_key_value_pair( 

774 self, k: str, v: Any, data: Dict[str, Any] 

775 ) -> Dict[str, Any]: 

776 """Flatten string key/value pairs only.""" 

777 if not util._is_picklable(k, v): 

778 return data 

779 if self.keys: 

780 if not isinstance(k, str): 

781 return data 

782 elif k.startswith(tags.JSON_KEY): 

783 k = self._escape_key(k) 

784 else: 

785 if k is None: 

786 k = "null" # for compatibility with common json encoders 

787 

788 if self.numeric_keys and isinstance(k, (int, float)): 

789 pass 

790 elif not isinstance(k, str): 

791 try: 

792 k = repr(k) 

793 except Exception: 

794 k = str(k) 

795 

796 data[k] = self._flatten(v) 

797 return data 
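
# Non-string key sketch for the two helpers above: with keys=False the key is
# coerced via repr(), while keys=True escapes it with the tags.JSON_KEY prefix
# during the second pass of _flatten_dict_obj() below; outputs are approximate.
from jsonpickle.pickler import Pickler

Pickler().flatten({1: 'one'})           # -> {'1': 'one'}
Pickler(keys=True).flatten({1: 'one'})  # -> {'json://1': 'one'}  (roughly)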

798 

799 def _flatten_dict_obj( 

800 self, 

801 obj: dict[Any, Any], 

802 data: Optional[Dict[Any, Any]] = None, 

803 exclude: Iterable[Any] = (), 

804 ) -> Dict[str, Any]: 

805 """Recursively call flatten() and return json-friendly dict""" 

806 if data is None: 

807 data = obj.__class__() 

808 

809 # If we allow non-string keys then we have to do a two-phase 

810 # encoding to ensure that the reference IDs are deterministic. 

811 if self.keys: 

812 # Phase 1: serialize regular objects, ignore fancy keys. 

813 flatten = self._flatten_string_key_value_pair 

814 for k, v in util.items(obj, exclude=exclude): 

815 flatten(k, v, data) 

816 

817 # Phase 2: serialize non-string keys. 

818 flatten = self._flatten_non_string_key_value_pair 

819 for k, v in util.items(obj, exclude=exclude): 

820 flatten(k, v, data) 

821 else: 

822 # If we have string keys only then we only need a single pass. 

823 flatten = self._flatten_key_value_pair 

824 for k, v in util.items(obj, exclude=exclude): 

825 flatten(k, v, data) 

826 

827 # the collections.defaultdict protocol 

828 if hasattr(obj, "default_factory") and callable(obj.default_factory): 

829 factory = obj.default_factory 

830 if util._is_type(factory): 

831 # Reference the class/type 

832 # in this case it's Dict[str, str] 

833 value: Dict[str, str] = _mktyperef(factory) 

834 else: 

835 # The factory is not a type and could reference e.g. functions 

836 # or even the object instance itself, which creates a cycle. 

837 if self._mkref(factory): 

838 # We've never seen this object before so pickle it in-place. 

839 # Create an instance from the factory and assume that the 

840 # resulting instance is a suitable exemplar. 

841 value: Dict[str, Any] = self._flatten_obj_instance(handlers.CloneFactory(factory())) # type: ignore[no-redef] 

842 else: 

843 # We've seen this object before. 

844 # Break the cycle by emitting a reference. 

845 # in this case it's Dict[str, int] 

846 value: Dict[str, int] = self._getref(factory) # type: ignore[no-redef] 

847 data["default_factory"] = value 

848 

849 # Sub-classes of dict 

850 if hasattr(obj, "__dict__") and self.unpicklable and obj != obj.__dict__: 

851 if self._mkref(obj.__dict__): 

852 dict_data = {} 

853 self._flatten_dict_obj(obj.__dict__, dict_data, exclude=exclude) 

854 data["__dict__"] = dict_data 

855 else: 

856 data["__dict__"] = self._getref(obj.__dict__) 

857 

858 return data 
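
# Sketch of the collections.defaultdict protocol handled above: when the
# factory is a type it is recorded as a py/type reference via _mktyperef();
# output shown is approximate.
import collections
from jsonpickle.pickler import Pickler

flat = Pickler().flatten(collections.defaultdict(list, {'a': [1]}))
# flat['a'] == [1]
# flat['default_factory'] == {'py/type': 'builtins.list'}  (roughly)
# flat['py/object'] == 'collections.defaultdict'  (roughly)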

859 

860 def _get_flattener(self, obj: Any) -> Optional[Callable[[Any], Any]]: 

861 if type(obj) in (list, dict): 

862 if self._mkref(obj): 

863 return ( 

864 self._list_recurse if type(obj) is list else self._flatten_dict_obj 

865 ) 

866 else: 

867 return self._getref 

868 

869 # We handle tuples and sets by encoding them in a "(tuple|set)dict" 

870 elif type(obj) in (tuple, set): 

871 if not self.unpicklable: 

872 return self._list_recurse 

873 return lambda obj: { 

874 tags.TUPLE if type(obj) is tuple else tags.SET: [ 

875 self._flatten(v) for v in obj 

876 ] 

877 } 

878 

879 elif util._is_module_function(obj): 

880 return self._flatten_function 

881 

882 elif util._is_object(obj): 

883 return self._ref_obj_instance 

884 

885 elif util._is_type(obj): 

886 return _mktyperef 

887 

888 # instance methods, lambdas, old style classes... 

889 self._pickle_warning(obj) 

890 return None 

891 

892 def _flatten_sequence_obj( 

893 self, obj: Iterable[Any], data: Dict[str, Any] 

894 ) -> Union[Dict[str, Any], List[Any]]: 

895 """Return a json-friendly dict for a sequence subclass.""" 

896 if hasattr(obj, "__dict__"): 

897 self._flatten_dict_obj(obj.__dict__, data) 

898 value = [self._flatten(v) for v in obj] 

899 if self.unpicklable: 

900 data[tags.SEQ] = value 

901 else: 

902 return value 

903 return data