Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/prompt_toolkit/cache.py: 56%

57 statements  

« prev     ^ index     » next       coverage.py v7.4.4, created at 2024-04-20 06:09 +0000

1from __future__ import annotations 

2 

3from collections import deque 

4from functools import wraps 

5from typing import Any, Callable, Dict, Generic, Hashable, Tuple, TypeVar, cast 

6 

7__all__ = [ 

8 "SimpleCache", 

9 "FastDictCache", 

10 "memoized", 

11] 

12 

13_T = TypeVar("_T", bound=Hashable) 

14_U = TypeVar("_U") 

15 

16 

class SimpleCache(Generic[_T, _U]):
    """
    Bounded key/value cache with FIFO eviction: once more than `maxsize`
    entries are stored, the entry that was inserted first is dropped.

    :param maxsize: Maximum size of the cache. (Don't make it too big.)
    """

    def __init__(self, maxsize: int = 8) -> None:
        assert maxsize > 0

        # Mapping of cached values, plus insertion order for eviction.
        self._data: dict[_T, _U] = {}
        self._keys: deque[_T] = deque()
        self.maxsize: int = maxsize

    def get(self, key: _T, getter_func: Callable[[], _U]) -> _U:
        """
        Return the cached value for `key`. On a miss, compute it with
        `getter_func`, remember it, and return it.
        """
        # Fast path: already cached.
        if key in self._data:
            return self._data[key]

        # Cache miss: resolve the value and store it.
        result = getter_func()
        self._data[key] = result
        self._keys.append(key)

        # Evict the oldest entry once the capacity is exceeded.
        # (The key may already have been removed, hence the default.)
        if len(self._data) > self.maxsize:
            oldest = self._keys.popleft()
            self._data.pop(oldest, None)

        return result

    def clear(self) -> None:
        "Clear cache."
        self._data = {}
        self._keys = deque()

59 

60 

61_K = TypeVar("_K", bound=Tuple[Hashable, ...]) 

62_V = TypeVar("_V") 

63 

64 

class FastDictCache(Dict[_K, _V]):
    """
    Fast, lightweight cache which keeps at most `size` items.
    It will discard the oldest items in the cache first.

    The cache is a dictionary, which doesn't keep track of access counts.
    It is perfect to cache little immutable objects which are not expensive to
    create, but where a dictionary lookup is still much faster than an object
    instantiation.

    :param get_value: Callable that's called in case of a missing key.
    """

    # NOTE: Used for caching `prompt_toolkit.layout.screen.Char` and
    #       `prompt_toolkit.Document`. Keep this really lightweight: a cache
    #       hit has to stay cheaper than instantiating a fresh object.
    #       (Dictionary lookups are really fast.)
    #       `SimpleCache` remains necessary whenever the cache key differs
    #       from the arguments of the value-producing function.
    def __init__(self, get_value: Callable[..., _V], size: int = 1000000) -> None:
        assert size > 0

        self._keys: deque[_K] = deque()
        self.get_value = get_value
        self.size = size

    def __missing__(self, key: _K) -> _V:
        # Drop the oldest entry first when the size limit has been exceeded.
        if len(self) > self.size:
            oldest = self._keys.popleft()
            if oldest in self:
                del self[oldest]

        # The key tuple is splatted into the factory callable.
        value = self.get_value(*key)
        self[key] = value
        self._keys.append(key)
        return value

104 

105 

106_F = TypeVar("_F", bound=Callable[..., object]) 

107 

108 

def memoized(maxsize: int = 1024) -> Callable[[_F], _F]:
    """
    Memoization decorator for immutable classes and pure functions.
    """

    def decorator(obj: _F) -> _F:
        # One bounded cache per decorated callable.
        cache: SimpleCache[Hashable, Any] = SimpleCache(maxsize=maxsize)

        @wraps(obj)
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            # Keyword arguments are sorted so call order doesn't matter.
            cache_key = (args, tuple(sorted(kwargs.items())))
            return cache.get(cache_key, lambda: obj(*args, **kwargs))

        return cast(_F, wrapper)

    return decorator