Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.10/site-packages/astroid/inference_tip.py: 89%

Shortcuts on this page

r m x   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

46 statements  

1# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html 

2# For details: https://github.com/pylint-dev/astroid/blob/main/LICENSE 

3# Copyright (c) https://github.com/pylint-dev/astroid/blob/main/CONTRIBUTORS.txt 

4 

5"""Transform utilities (filters and decorator).""" 

6 

7from __future__ import annotations 

8 

9from collections import OrderedDict 

10from collections.abc import Generator 

11from typing import Any, TypeVar 

12 

13from astroid.context import InferenceContext 

14from astroid.exceptions import InferenceOverwriteError, UseInferenceDefault 

15from astroid.nodes import NodeNG 

16from astroid.typing import ( 

17 InferenceResult, 

18 InferFn, 

19 TransformFn, 

20) 

21 

# Memoized inference-tip results, keyed by (inference function, node,
# context). Insertion-ordered so that the size-bounded eviction in
# _inference_tip_cached (popitem(last=False)) drops the oldest entry first.
_cache: OrderedDict[
    tuple[InferFn[Any], NodeNG, InferenceContext | None], list[InferenceResult]
] = OrderedDict()

# Recursion guard: (func, node) pairs whose inference is currently in
# progress. Deliberately omits the context ("partial" key) so recursive
# re-entry with a slightly different context is still detected.
_CURRENTLY_INFERRING: set[tuple[InferFn[Any], NodeNG]] = set()

# Type variable constraining inference tips to NodeNG subclasses.
_NodesT = TypeVar("_NodesT", bound=NodeNG)

29 

30 

def clear_inference_tip_cache() -> None:
    """Empty the inference-tip result cache.

    Later inference-tip calls will recompute and repopulate it.
    """
    _cache.clear()

34 

35 

def _inference_tip_cached(func: InferFn[_NodesT]) -> InferFn[_NodesT]:
    """Cache decorator used for inference tips.

    Wraps *func* so that its results for a given (node, context) are
    memoized in the module-level ``_cache``, and so that re-entrant calls
    for the same (func, node) pair raise ``UseInferenceDefault`` instead
    of recursing forever.
    """

    def inner(
        node: _NodesT,
        context: InferenceContext | None = None,
        **kwargs: Any,
    ) -> Generator[InferenceResult]:
        partial_cache_key = (func, node)
        if partial_cache_key in _CURRENTLY_INFERRING:
            # If through recursion we end up trying to infer the same
            # func + node we raise here.
            _CURRENTLY_INFERRING.remove(partial_cache_key)
            raise UseInferenceDefault
        if context is not None and context.is_empty():
            # Fresh, empty contexts will defeat the cache.
            context = None
        try:
            # Cache hit: replay the previously computed results.
            yield from _cache[func, node, context]
            return
        except KeyError:
            # Cache miss.
            # Recursion guard with a partial cache key.
            # Using the full key causes a recursion error on PyPy.
            # It's a pragmatic compromise to avoid so much recursive inference
            # with slightly different contexts while still passing the simple
            # test cases included with this commit.
            _CURRENTLY_INFERRING.add(partial_cache_key)
            try:
                # May raise UseInferenceDefault.
                # Materialize the generator so the cached value is replayable.
                result = _cache[func, node, context] = list(
                    func(node, context, **kwargs)
                )
            except Exception as e:
                # Suppress the KeyError from the cache miss.
                raise e from None
            finally:
                # Remove recursion guard.
                try:
                    _CURRENTLY_INFERRING.remove(partial_cache_key)
                except KeyError:
                    pass  # Recursion may beat us to the punch.

            if len(_cache) > 64:
                # Bound the cache: evict the oldest (first-inserted) entry.
                _cache.popitem(last=False)

            # https://github.com/pylint-dev/pylint/issues/8686
            yield from result  # pylint: disable=used-before-assignment

    return inner

85 

86 

def inference_tip(
    infer_function: InferFn[_NodesT], raise_on_overwrite: bool = False
) -> TransformFn[_NodesT]:
    """Given an instance specific inference function, return a function to be
    given to AstroidManager().register_transform to set this inference function.

    :param bool raise_on_overwrite: Raise an `InferenceOverwriteError`
        if the inference tip will overwrite another. Used for debugging

    Typical usage

    .. sourcecode:: python

        AstroidManager().register_transform(Call, inference_tip(infer_named_tuple),
                                            predicate)

    .. Note::

        Using an inference tip will override
        any previously set inference tip for the given
        node. Use a predicate in the transform to prevent
        excess overwrites.
    """

    def transform(
        node: _NodesT, infer_function: InferFn[_NodesT] = infer_function
    ) -> _NodesT:
        if (
            raise_on_overwrite
            and node._explicit_inference is not None
            and node._explicit_inference is not infer_function
        ):
            # Bug fix: the original passed the arguments swapped, reporting
            # the new inference function as the "already set" one and the
            # previously installed one as the overwriter. The inference that
            # is already set is node._explicit_inference; the one we are
            # being asked to install is infer_function.
            raise InferenceOverwriteError(
                "Inference already set to {existing_inference}. "
                "Trying to overwrite with {new_inference} for {node}".format(
                    existing_inference=node._explicit_inference,
                    new_inference=infer_function,
                    node=node,
                )
            )
        # Install the (cached) inference tip on the node; _explicit_inference
        # takes precedence over default inference.
        node._explicit_inference = _inference_tip_cached(infer_function)
        return node

    return transform