Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/jsonpickle/pickler.py: 12%

390 statements  

coverage.py v7.2.7, created at 2023-06-07 06:20 +0000

# Copyright (C) 2008 John Paulett (john -at- paulett.org)
# Copyright (C) 2009-2018 David Aguilar (davvid -at- gmail.com)
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
from __future__ import absolute_import, division, unicode_literals

import decimal
import inspect
import itertools
import sys
import warnings
from itertools import chain, islice

from . import compat, handlers, tags, util
from .backend import json
from .compat import numeric_types, string_types


def encode(
    value,
    unpicklable=True,
    make_refs=True,
    keys=False,
    max_depth=None,
    reset=True,
    backend=None,
    warn=False,
    context=None,
    max_iter=None,
    use_decimal=False,
    numeric_keys=False,
    use_base85=False,
    fail_safe=None,
    indent=None,
    separators=None,
    include_properties=False,
):
    """Return a JSON formatted representation of value, a Python object.

    :param unpicklable: If set to ``False`` then the output will not contain the
        information necessary to turn the JSON data back into Python objects,
        but a simpler JSON stream is produced. It's recommended to set this
        parameter to ``False`` when your code does not rely on two objects
        having the same ``id()`` value, and when it is sufficient for those two
        objects to be equal by ``==``, such as when serializing sklearn
        instances. If you experience (de)serialization being incorrect when you
        use numpy, pandas, or sklearn handlers, this should be set to ``False``.
        If you want the output to not include the dtype for numpy arrays, add
        ``jsonpickle.register(numpy.generic,
        UnpicklableNumpyGenericHandler, base=True)`` before your pickling code.
    :param max_depth: If set to a non-negative integer then jsonpickle will
        not recurse deeper than 'max_depth' steps into the object. Anything
        deeper than 'max_depth' is represented using a Python repr() of the
        object.
    :param make_refs: If set to False jsonpickle's referencing support is
        disabled. Objects that are id()-identical won't be preserved across
        encode()/decode(), but the resulting JSON stream will be conceptually
        simpler. jsonpickle detects cyclical objects and will break the cycle
        by calling repr() instead of recursing when make_refs is set False.
    :param keys: If set to True then jsonpickle will encode non-string
        dictionary keys instead of coercing them into strings via `repr()`.
        This is typically what you want if you need to support integer or
        object keys in dictionaries.
    :param numeric_keys: Only use this option if the backend supports integer
        dict keys natively. This flag tells jsonpickle to leave numeric keys
        as-is rather than conforming them to json-friendly strings.
        Using ``keys=True`` is the typical solution for integer keys, so only
        use this if you have a specific use case where you want to allow the
        backend to handle serialization of numeric dict keys.
    :param warn: If set to True then jsonpickle will warn when it
        returns None for an object which it cannot pickle
        (e.g. file descriptors).
    :param max_iter: If set to a non-negative integer then jsonpickle will
        consume at most `max_iter` items when pickling iterators.
    :param use_decimal: If set to True jsonpickle will allow Decimal
        instances to pass through, with the assumption that the simplejson
        backend will be used in `use_decimal` mode. In order to use this mode
        you will need to configure simplejson::

            jsonpickle.set_encoder_options('simplejson',
                                           use_decimal=True, sort_keys=True)
            jsonpickle.set_decoder_options('simplejson',
                                           use_decimal=True)
            jsonpickle.set_preferred_backend('simplejson')

        NOTE: A side-effect of the above settings is that float values will be
        converted to Decimal when converting to json.
    :param use_base85:
        If possible, use base85 to encode binary data. Base85 inflates binary
        data by 1/4, whereas base64 expands it by 1/3. This argument is
        ignored on Python 2 because base85 is not supported there.
    :param fail_safe: If set to a function, exceptions are ignored when
        pickling; when an exception occurs, the function is called and its
        return value is used as the value for the object that caused the error.
    :param indent: When `indent` is a non-negative integer, then JSON array
        elements and object members will be pretty-printed with that indent
        level. An indent level of 0 will only insert newlines. ``None`` is
        the most compact representation. Since the default item separator is
        ``(', ', ': ')``, the output might include trailing whitespace when
        ``indent`` is specified. You can use ``separators=(',', ': ')`` to
        avoid this. This value is passed directly to the active JSON backend
        library and not used by jsonpickle directly.
    :param separators:
        If ``separators`` is an ``(item_separator, dict_separator)`` tuple
        then it will be used instead of the default ``(', ', ': ')``
        separators. ``(',', ':')`` is the most compact JSON representation.
        This value is passed directly to the active JSON backend library and
        not used by jsonpickle directly.
    :param include_properties:
        Include the names and values of class properties in the generated json.
        Properties are unpickled properly regardless of this setting; this
        option is meant for processing the json outside of Python. Certain
        types such as sets will not pickle because they have no native JSON
        equivalent. Defaults to ``False``.

    >>> encode('my string') == '"my string"'
    True
    >>> encode(36) == '36'
    True
    >>> encode({'foo': True}) == '{"foo": true}'
    True
    >>> encode({'foo': [1, 2, [3, 4]]}, max_depth=1)
    '{"foo": "[1, 2, [3, 4]]"}'

    """
    backend = backend or json
    context = context or Pickler(
        unpicklable=unpicklable,
        make_refs=make_refs,
        keys=keys,
        backend=backend,
        max_depth=max_depth,
        warn=warn,
        max_iter=max_iter,
        numeric_keys=numeric_keys,
        use_decimal=use_decimal,
        use_base85=use_base85,
        fail_safe=fail_safe,
        include_properties=include_properties,
    )
    return backend.encode(
        context.flatten(value, reset=reset), indent=indent, separators=separators
    )

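# Illustrative sketch (not part of the original module): how the flags above
# change encode()'s output for a small user-defined class. The Point class is
# hypothetical; the exact JSON shown assumes the default json backend and a
# class defined in __main__.
#
#     >>> import jsonpickle
#     >>> class Point:
#     ...     def __init__(self, x, y):
#     ...         self.x, self.y = x, y
#     >>> jsonpickle.encode(Point(1, 2))
#     '{"py/object": "__main__.Point", "x": 1, "y": 2}'
#     >>> jsonpickle.encode(Point(1, 2), unpicklable=False)
#     '{"x": 1, "y": 2}'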

def _in_cycle(obj, objs, max_reached, make_refs):
    """Detect cyclic structures that would lead to infinite recursion"""
    return (
        (max_reached or (not make_refs and id(obj) in objs))
        and not util.is_primitive(obj)
        and not util.is_enum(obj)
    )


def _mktyperef(obj):
    """Return a typeref dictionary

    >>> _mktyperef(AssertionError) == {'py/type': 'builtins.AssertionError'}
    True

    """
    return {tags.TYPE: util.importable_name(obj)}


def _wrap_string_slot(string):
    """Converts __slots__ = 'a' into __slots__ = ('a',)"""
    if isinstance(string, string_types):
        return (string,)
    return string

class Pickler(object):
    def __init__(
        self,
        unpicklable=True,
        make_refs=True,
        max_depth=None,
        backend=None,
        keys=False,
        warn=False,
        max_iter=None,
        numeric_keys=False,
        use_decimal=False,
        use_base85=False,
        fail_safe=None,
        include_properties=False,
    ):
        self.unpicklable = unpicklable
        self.make_refs = make_refs
        self.backend = backend or json
        self.keys = keys
        self.warn = warn
        self.numeric_keys = numeric_keys
        self.use_base85 = use_base85
        # The current recursion depth
        self._depth = -1
        # The maximal recursion depth
        self._max_depth = max_depth
        # Maps id(obj) to reference IDs
        self._objs = {}
        # Keeps references alive so objects aren't garbage collected mid-pickle
        self._seen = []
        # maximum number of items to take from a pickled iterator
        self._max_iter = max_iter
        # Whether to allow decimals to pass through
        self._use_decimal = use_decimal
        # A cache of objects that have already been flattened.
        self._flattened = {}

        if self.use_base85:
            self._bytes_tag = tags.B85
            self._bytes_encoder = util.b85encode
        else:
            self._bytes_tag = tags.B64
            self._bytes_encoder = util.b64encode

        # ignore exceptions
        self.fail_safe = fail_safe
        self.include_properties = include_properties

    def _determine_sort_keys(self):
        for _, options in getattr(self.backend, '_encoder_options', {}).values():
            if options.get("sort_keys", False):
                # the user has set one of the backends to sort keys
                return True
        return False

    def _sort_attrs(self, obj):
        if hasattr(obj, "__slots__") and self.warn:
            # Slots are read-only by default; the only way to sort keys
            # would be to do it in a subclass, and that would require
            # calling the parent's __init__ again. That could cause
            # issues, so we refuse to handle it.
            raise TypeError(
                "Objects with __slots__ cannot have their keys reliably sorted by jsonpickle! Please sort the keys in the __slots__ definition instead."
            )
        # Somehow some classes don't have slots or dict
        elif hasattr(obj, "__dict__"):
            try:
                obj.__dict__ = dict(sorted(obj.__dict__.items()))
            except (TypeError, AttributeError):
                # Can't set attributes of builtin/extension type
                pass
        return obj

    def reset(self):
        self._objs = {}
        self._depth = -1
        self._seen = []
        self._flattened = {}

    def _push(self):
        """Steps down one level in the namespace."""
        self._depth += 1

    def _pop(self, value):
        """Step up one level in the namespace and return the value.
        If we're at the root, reset the pickler's state.
        """
        self._depth -= 1
        if self._depth == -1:
            self.reset()
        return value

    def _log_ref(self, obj):
        """
        Log a reference to an in-memory object.
        Return True if this object is new and was assigned
        a new ID. Otherwise return False.
        """
        objid = id(obj)
        is_new = objid not in self._objs
        if is_new:
            new_id = len(self._objs)
            self._objs[objid] = new_id
        return is_new

    def _mkref(self, obj):
        """
        Log a reference to an in-memory object, and return
        if that object should be considered newly logged.
        """
        is_new = self._log_ref(obj)
        # Pretend the object is new
        pretend_new = not self.unpicklable or not self.make_refs
        return pretend_new or is_new

    def _getref(self, obj):
        return {tags.ID: self._objs.get(id(obj))}

    def _flatten(self, obj):
        if self.unpicklable and self.make_refs:
            result = self._flatten_impl(obj)
        else:
            try:
                result = self._flattened[id(obj)]
            except KeyError:
                result = self._flattened[id(obj)] = self._flatten_impl(obj)
        return result

    def flatten(self, obj, reset=True):
        """Takes an object and returns a JSON-safe representation of it.

        Simply returns any of the basic builtin datatypes

        >>> p = Pickler()
        >>> p.flatten('hello world') == 'hello world'
        True
        >>> p.flatten(49)
        49
        >>> p.flatten(350.0)
        350.0
        >>> p.flatten(True)
        True
        >>> p.flatten(False)
        False
        >>> r = p.flatten(None)
        >>> r is None
        True
        >>> p.flatten(False)
        False
        >>> p.flatten([1, 2, 3, 4])
        [1, 2, 3, 4]
        >>> p.flatten((1,2,))[tags.TUPLE]
        [1, 2]
        >>> p.flatten({'key': 'value'}) == {'key': 'value'}
        True
        """
        if reset:
            self.reset()
        if self._determine_sort_keys():
            obj = self._sort_attrs(obj)
        return self._flatten(obj)

    def _flatten_bytestring(self, obj):
        return {self._bytes_tag: self._bytes_encoder(obj)}

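    # Illustrative sketch (not part of the original module): with the default
    # settings bytes are wrapped in a base64 tag, and with use_base85=True a
    # base85 tag is used instead. The tag names assume the current tags module
    # ('py/b64' and 'py/b85'); the base85 payload is elided here.
    #
    #     >>> Pickler()._flatten_bytestring(b'\x00\x01')
    #     {'py/b64': 'AAE='}
    #     >>> Pickler(use_base85=True)._flatten_bytestring(b'\x00\x01')
    #     {'py/b85': ...}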

    def _flatten_impl(self, obj):
        #########################################
        # if obj is nonrecursive return immediately
        # for performance reasons we don't want to do recursive checks
        if type(obj) is bytes:
            return self._flatten_bytestring(obj)

        # Decimal is a primitive when use_decimal is True
        if type(obj) in util.PRIMITIVES or (
            self._use_decimal and isinstance(obj, decimal.Decimal)
        ):
            return obj
        #########################################

        self._push()
        return self._pop(self._flatten_obj(obj))

    def _max_reached(self):
        return self._depth == self._max_depth

    def _pickle_warning(self, obj):
        if self.warn:
            msg = 'jsonpickle cannot pickle %r: replaced with None' % obj
            warnings.warn(msg)

    def _flatten_obj(self, obj):
        self._seen.append(obj)

        max_reached = self._max_reached()

        try:
            in_cycle = _in_cycle(obj, self._objs, max_reached, self.make_refs)
            if in_cycle:
                # break the cycle
                flatten_func = repr
            else:
                flatten_func = self._get_flattener(obj)

            if flatten_func is None:
                self._pickle_warning(obj)
                return None

            return flatten_func(obj)

        except (KeyboardInterrupt, SystemExit) as e:
            raise e
        except Exception as e:
            if self.fail_safe is None:
                raise e
            else:
                return self.fail_safe(e)

    def _list_recurse(self, obj):
        return [self._flatten(v) for v in obj]

    def _flatten_function(self, obj):
        if self.unpicklable:
            data = {tags.FUNCTION: util.importable_name(obj)}
        else:
            data = None

        return data

    def _getstate(self, obj, data):
        state = self._flatten(obj)
        if self.unpicklable:
            data[tags.STATE] = state
        else:
            data = state
        return data

    def _flatten_key_value_pair(self, k, v, data):
        """Flatten a key/value pair into the passed-in dictionary."""
        if not util.is_picklable(k, v):
            return data

        if k is None:
            k = 'null'  # for compatibility with common json encoders

        if self.numeric_keys and isinstance(k, numeric_types):
            pass
        elif not isinstance(k, string_types):
            try:
                k = repr(k)
            except Exception:
                k = compat.ustr(k)

        data[k] = self._flatten(v)
        return data

    def _flatten_obj_attrs(self, obj, attrs, data):
        flatten = self._flatten_key_value_pair
        ok = False
        for k in attrs:
            try:
                if not k.startswith('__'):
                    value = getattr(obj, k)
                else:
                    value = getattr(obj, f"_{obj.__class__.__name__}{k}")
                flatten(k, value, data)
            except AttributeError:
                # The attribute may have been deleted
                continue
            ok = True
        return ok

    def _flatten_properties(self, obj, data, allslots=None):
        if allslots is None:
            # a mutable default argument would be shared across calls,
            # which can lead to some weird errors
            allslots = []

        # convert to set in case there are a lot of slots
        allslots_set = set(itertools.chain.from_iterable(allslots))

        # i don't like lambdas
        def valid_property(x):
            return not x[0].startswith("__") and x[0] not in allslots_set

        properties = [
            x[0] for x in inspect.getmembers(obj.__class__) if valid_property(x)
        ]

        properties_dict = {}
        for p_name in properties:
            p_val = getattr(obj, p_name)
            if util.is_not_class(p_val):
                properties_dict[p_name] = p_val
            else:
                properties_dict[p_name] = self._flatten(p_val)

        data[tags.PROPERTY] = properties_dict

        return data

    def _flatten_newstyle_with_slots(self, obj, data):
        """Return a json-friendly dict for new-style objects with __slots__."""
        allslots = [
            _wrap_string_slot(getattr(cls, '__slots__', tuple()))
            for cls in obj.__class__.mro()
        ]

        # add properties to the attribute list
        if self.include_properties:
            data = self._flatten_properties(obj, data, allslots)

        if not self._flatten_obj_attrs(obj, chain(*allslots), data):
            attrs = [
                x for x in dir(obj) if not x.startswith('__') and not x.endswith('__')
            ]
            self._flatten_obj_attrs(obj, attrs, data)

        return data

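    # Illustrative sketch (not part of the original module): a plain slotted
    # class flattens its slot names straight into the object dict. The class
    # below is hypothetical; the exact output assumes default settings and a
    # class defined in __main__.
    #
    #     >>> class Slotted:
    #     ...     __slots__ = ('a', 'b')
    #     ...     def __init__(self):
    #     ...         self.a, self.b = 1, 2
    #     >>> Pickler().flatten(Slotted())
    #     {'py/object': '__main__.Slotted', 'a': 1, 'b': 2}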

    def _flatten_obj_instance(self, obj):
        """Recursively flatten an instance and return a json-friendly dict"""
        data = {}
        has_class = hasattr(obj, '__class__')
        has_dict = hasattr(obj, '__dict__')
        has_slots = not has_dict and hasattr(obj, '__slots__')
        has_getnewargs = util.has_method(obj, '__getnewargs__')
        has_getnewargs_ex = util.has_method(obj, '__getnewargs_ex__')
        has_getinitargs = util.has_method(obj, '__getinitargs__')
        has_reduce, has_reduce_ex = util.has_reduce(obj)

        # Support objects with __getstate__(); this ensures that
        # both __setstate__() and __getstate__() are implemented
        has_own_getstate = hasattr(type(obj), '__getstate__') and type(
            obj
        ).__getstate__ is not getattr(object, '__getstate__', None)
        # not using has_method since __getstate__() is handled separately below
        # Note: on Python 3.11+, all objects have __getstate__.

        if has_class:
            cls = obj.__class__
        else:
            cls = type(obj)

        # Check for a custom handler
        class_name = util.importable_name(cls)
        handler = handlers.get(cls, handlers.get(class_name))
        if handler is not None:
            if self.unpicklable:
                data[tags.OBJECT] = class_name
            result = handler(self).flatten(obj, data)
            if result is None:
                self._pickle_warning(obj)
            return result

        reduce_val = None

        if self.include_properties:
            data = self._flatten_properties(obj, data)

        if self.unpicklable:
            if has_reduce and not has_reduce_ex:
                try:
                    reduce_val = obj.__reduce__()
                except TypeError:
                    # A lot of builtin types have a reduce which
                    # just raises a TypeError
                    # we ignore those
                    pass

            # test for a reduce implementation, and redirect before
            # doing anything else if that is what reduce requests
            elif has_reduce_ex:
                try:
                    # we're implementing protocol 2
                    reduce_val = obj.__reduce_ex__(2)
                except TypeError:
                    # A lot of builtin types have a reduce which
                    # just raises a TypeError
                    # we ignore those
                    pass

            if reduce_val and isinstance(reduce_val, string_types):
                try:
                    varpath = iter(reduce_val.split('.'))
                    # curmod will be transformed by the
                    # loop into the value to pickle
                    curmod = sys.modules[next(varpath)]
                    for modname in varpath:
                        curmod = getattr(curmod, modname)
                    # replace obj with value retrieved
                    return self._flatten(curmod)
                except KeyError:
                    # well, we can't do anything with that, so we ignore it
                    pass

            elif reduce_val:
                # at this point, reduce_val should be some kind of iterable
                # pad out to len 5
                rv_as_list = list(reduce_val)
                insufficiency = 5 - len(rv_as_list)
                if insufficiency:
                    rv_as_list += [None] * insufficiency

                if getattr(rv_as_list[0], '__name__', '') == '__newobj__':
                    rv_as_list[0] = tags.NEWOBJ

                f, args, state, listitems, dictitems = rv_as_list

                # check that getstate/setstate is sane
                if not (
                    state
                    and has_own_getstate
                    and not hasattr(obj, '__setstate__')
                    and not isinstance(obj, dict)
                ):
                    # turn iterators to iterables for convenient serialization
                    if rv_as_list[3]:
                        rv_as_list[3] = tuple(rv_as_list[3])

                    if rv_as_list[4]:
                        rv_as_list[4] = tuple(rv_as_list[4])

                    reduce_args = list(map(self._flatten, rv_as_list))
                    last_index = len(reduce_args) - 1
                    while last_index >= 2 and reduce_args[last_index] is None:
                        last_index -= 1
                    data[tags.REDUCE] = reduce_args[: last_index + 1]

                    return data

        if has_class and not util.is_module(obj):
            if self.unpicklable:
                data[tags.OBJECT] = class_name

            if has_getnewargs_ex:
                data[tags.NEWARGSEX] = list(map(self._flatten, obj.__getnewargs_ex__()))

            if has_getnewargs and not has_getnewargs_ex:
                data[tags.NEWARGS] = self._flatten(obj.__getnewargs__())

            if has_getinitargs:
                data[tags.INITARGS] = self._flatten(obj.__getinitargs__())

        if has_own_getstate:
            try:
                state = obj.__getstate__()
            except TypeError:
                # Has getstate but it cannot be called, e.g. file descriptors
                # in Python3
                self._pickle_warning(obj)
                return None
            else:
                if state:
                    return self._getstate(state, data)

        if util.is_module(obj):
            if self.unpicklable:
                data[tags.REPR] = '{name}/{name}'.format(name=obj.__name__)
            else:
                data = compat.ustr(obj)
            return data

        if util.is_dictionary_subclass(obj):
            self._flatten_dict_obj(obj, data)
            return data

        if util.is_sequence_subclass(obj):
            return self._flatten_sequence_obj(obj, data)

        if util.is_iterator(obj):
            # force list in python 3
            data[tags.ITERATOR] = list(map(self._flatten, islice(obj, self._max_iter)))
            return data

        if has_dict:
            # Support objects that subclass list and set
            if util.is_sequence_subclass(obj):
                return self._flatten_sequence_obj(obj, data)

            # hack for zope persistent objects; this unghostifies the object
            getattr(obj, '_', None)
            return self._flatten_dict_obj(obj.__dict__, data)

        if has_slots:
            return self._flatten_newstyle_with_slots(obj, data)

        # catchall return for data created above without a return
        # (e.g. __getnewargs__ is not supposed to be the end of the story)
        if data:
            return data

        self._pickle_warning(obj)
        return None

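    # Illustrative sketch (not part of the original module): when make_refs is
    # enabled (the default), the second occurrence of an id()-identical object
    # is emitted as a reference instead of being flattened again. The tag name
    # assumes tags.ID is 'py/id'.
    #
    #     >>> shared = ['x']
    #     >>> Pickler().flatten([shared, shared])
    #     [['x'], {'py/id': 1}]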

    def _ref_obj_instance(self, obj):
        """Reference an existing object or flatten if new"""
        if self.unpicklable:
            if self._mkref(obj):
                # We've never seen this object so return its
                # json representation.
                return self._flatten_obj_instance(obj)
            # We've seen this object before so place an object
            # reference tag in the data. This avoids infinite recursion
            # when processing cyclical objects.
            return self._getref(obj)
        else:
            max_reached = self._max_reached()
            in_cycle = _in_cycle(obj, self._objs, max_reached, False)
            if in_cycle:
                # A circular reference becomes None.
                return None

            self._mkref(obj)
            return self._flatten_obj_instance(obj)

    def _escape_key(self, k):
        return tags.JSON_KEY + encode(
            k,
            reset=False,
            keys=True,
            context=self,
            backend=self.backend,
            make_refs=self.make_refs,
        )

    def _flatten_non_string_key_value_pair(self, k, v, data):
        """Flatten only non-string key/value pairs"""
        if not util.is_picklable(k, v):
            return data
        if self.keys and not isinstance(k, string_types):
            k = self._escape_key(k)
            data[k] = self._flatten(v)
        return data

    def _flatten_string_key_value_pair(self, k, v, data):
        """Flatten string key/value pairs only."""
        if not util.is_picklable(k, v):
            return data
        if self.keys:
            if not isinstance(k, string_types):
                return data
            elif k.startswith(tags.JSON_KEY):
                k = self._escape_key(k)
        else:
            if k is None:
                k = 'null'  # for compatibility with common json encoders

            if self.numeric_keys and isinstance(k, numeric_types):
                pass
            elif not isinstance(k, string_types):
                try:
                    k = repr(k)
                except Exception:
                    k = compat.ustr(k)

        data[k] = self._flatten(v)
        return data

    def _flatten_dict_obj(self, obj, data=None):
        """Recursively call flatten() and return json-friendly dict"""
        if data is None:
            data = obj.__class__()

        # If we allow non-string keys then we have to do a two-phase
        # encoding to ensure that the reference IDs are deterministic.
        if self.keys:
            # Phase 1: serialize regular objects, ignore fancy keys.
            flatten = self._flatten_string_key_value_pair
            for k, v in util.items(obj):
                flatten(k, v, data)

            # Phase 2: serialize non-string keys.
            flatten = self._flatten_non_string_key_value_pair
            for k, v in util.items(obj):
                flatten(k, v, data)
        else:
            # If we have string keys only then we only need a single pass.
            flatten = self._flatten_key_value_pair
            for k, v in util.items(obj):
                flatten(k, v, data)

        # the collections.defaultdict protocol
        if hasattr(obj, 'default_factory') and callable(obj.default_factory):
            factory = obj.default_factory
            if util.is_type(factory):
                # Reference the class/type
                value = _mktyperef(factory)
            else:
                # The factory is not a type and could reference e.g. functions
                # or even the object instance itself, which creates a cycle.
                if self._mkref(factory):
                    # We've never seen this object before so pickle it in-place.
                    # Create an instance from the factory and assume that the
                    # resulting instance is a suitable exemplar.
                    value = self._flatten_obj_instance(handlers.CloneFactory(factory()))
                else:
                    # We've seen this object before.
                    # Break the cycle by emitting a reference.
                    value = self._getref(factory)
            data['default_factory'] = value

        # Sub-classes of dict
        if hasattr(obj, '__dict__') and self.unpicklable and obj != obj.__dict__:
            dict_data = {}
            self._flatten_dict_obj(obj.__dict__, dict_data)
            data['__dict__'] = dict_data

        return data

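    # Illustrative sketch (not part of the original module): with keys=True a
    # non-string key is escaped with the JSON_KEY prefix rather than being
    # coerced through repr(). The prefix shown assumes tags.JSON_KEY is
    # 'json://'.
    #
    #     >>> import jsonpickle
    #     >>> jsonpickle.encode({1: 'one'}, keys=True)
    #     '{"json://1": "one"}'
    #     >>> jsonpickle.encode({1: 'one'})
    #     '{"1": "one"}'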

    def _get_flattener(self, obj):
        if type(obj) in (list, dict):
            if self._mkref(obj):
                return (
                    self._list_recurse if type(obj) is list else self._flatten_dict_obj
                )
            else:
                self._push()
                return self._getref

        # We handle tuples and sets by encoding them in a "(tuple|set)dict"
        elif type(obj) in (tuple, set):
            if not self.unpicklable:
                return self._list_recurse
            return lambda obj: {
                tags.TUPLE
                if type(obj) is tuple
                else tags.SET: [self._flatten(v) for v in obj]
            }

        elif util.is_object(obj):
            return self._ref_obj_instance

        elif util.is_type(obj):
            return _mktyperef

        elif util.is_module_function(obj):
            return self._flatten_function

        # instance methods, lambdas, old style classes...
        self._pickle_warning(obj)
        return None

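    # Illustrative sketch (not part of the original module): tuples and sets
    # are wrapped in single-key dicts so they survive the round trip, matching
    # the doctest for flatten() above. The tag names assume tags.TUPLE is
    # 'py/tuple' and tags.SET is 'py/set'.
    #
    #     >>> Pickler().flatten((1, 2))
    #     {'py/tuple': [1, 2]}
    #     >>> Pickler(unpicklable=False).flatten((1, 2))
    #     [1, 2]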

    def _flatten_sequence_obj(self, obj, data):
        """Return a json-friendly dict for a sequence subclass."""
        if hasattr(obj, '__dict__'):
            self._flatten_dict_obj(obj.__dict__, data)
        value = [self._flatten(v) for v in obj]
        if self.unpicklable:
            data[tags.SEQ] = value
        else:
            return value
        return data