Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/prompt_toolkit/cache.py: 56%
57 statements
« prev ^ index » next coverage.py v7.4.4, created at 2024-04-20 06:09 +0000
1from __future__ import annotations
3from collections import deque
4from functools import wraps
5from typing import Any, Callable, Dict, Generic, Hashable, Tuple, TypeVar, cast
# Public API of this module.
__all__ = [
    "SimpleCache",
    "FastDictCache",
    "memoized",
]

# Key/value type variables for `SimpleCache`; cache keys must be hashable.
_T = TypeVar("_T", bound=Hashable)
_U = TypeVar("_U")
class SimpleCache(Generic[_T, _U]):
    """
    Very simple cache that discards the oldest item when the cache size is
    exceeded.

    :param maxsize: Maximum size of the cache. (Don't make it too big.)
    """

    def __init__(self, maxsize: int = 8) -> None:
        assert maxsize > 0

        # Cached values, plus insertion order (for FIFO eviction).
        self._data: dict[_T, _U] = {}
        self._keys: deque[_T] = deque()
        self.maxsize: int = maxsize

    def get(self, key: _T, getter_func: Callable[[], _U]) -> _U:
        """
        Get object from the cache.
        If not found, call `getter_func` to resolve it, and put that on the top
        of the cache instead.
        """
        # Fast path: the value is already cached.
        if key in self._data:
            return self._data[key]

        # Cache miss: resolve the value and remember it.
        value = getter_func()
        self._data[key] = value
        self._keys.append(key)

        # Evict the oldest entries while the size limit is exceeded.
        while len(self._data) > self.maxsize:
            oldest = self._keys.popleft()
            # The entry may be gone already; ignore in that case.
            self._data.pop(oldest, None)

        return value

    def clear(self) -> None:
        "Clear cache."
        self._data = {}
        self._keys = deque()
# Key/value type variables for `FastDictCache`: keys are tuples of hashable
# values (the positional arguments forwarded to `get_value`).
_K = TypeVar("_K", bound=Tuple[Hashable, ...])
_V = TypeVar("_V")
class FastDictCache(Dict[_K, _V]):
    """
    Fast, lightweight cache which keeps at most `size` items.
    It will discard the oldest items in the cache first.

    The cache is a dictionary, which doesn't keep track of access counts.
    It is perfect to cache little immutable objects which are not expensive to
    create, but where a dictionary lookup is still much faster than an object
    instantiation.

    :param get_value: Callable that's called in case of a missing key.
    """

    # NOTE: This cache is used to cache `prompt_toolkit.layout.screen.Char` and
    #       `prompt_toolkit.Document`. Make sure to keep this really lightweight.
    #       Accessing the cache should stay faster than instantiating new
    #       objects.
    #       (Dictionary lookups are really fast.)
    #       SimpleCache is still required for cases where the cache key is not
    #       the same as the arguments given to the function that creates the
    #       value.)
    def __init__(self, get_value: Callable[..., _V], size: int = 1000000) -> None:
        assert size > 0

        self._keys: deque[_K] = deque()
        self.get_value = get_value
        self.size = size

    def __missing__(self, key: _K) -> _V:
        # Evict the oldest entry first when the size limit has been reached.
        if len(self) > self.size:
            drop = self._keys.popleft()
            # It may have been removed through other means already.
            self.pop(drop, None)

        # The key is a tuple of the positional arguments for `get_value`.
        value = self.get_value(*key)
        self[key] = value
        self._keys.append(key)
        return value
106_F = TypeVar("_F", bound=Callable[..., object])
def memoized(maxsize: int = 1024) -> Callable[[_F], _F]:
    """
    Memoization decorator for immutable classes and pure functions.
    """

    def decorator(func: _F) -> _F:
        # Bounded cache shared by every call to the decorated callable.
        results: SimpleCache[Hashable, Any] = SimpleCache(maxsize=maxsize)

        @wraps(func)
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            # Sort keyword arguments so that the key doesn't depend on the
            # order in which they were passed.
            cache_key = (args, tuple(sorted(kwargs.items())))
            return results.get(cache_key, lambda: func(*args, **kwargs))

        return cast(_F, wrapper)

    return decorator