Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/prompt_toolkit/cache.py: 57%
Shortcuts on this page
r m x — toggle line displays
j k — next/prev highlighted chunk
0 (zero) — top of page
1 (one) — first highlighted chunk
1from __future__ import annotations
3from collections import deque
4from collections.abc import Callable, Hashable
5from functools import wraps
6from typing import Any, Generic, TypeVar, cast
8__all__ = [
9 "SimpleCache",
10 "FastDictCache",
11 "memoized",
12]
14_T = TypeVar("_T", bound=Hashable)
15_U = TypeVar("_U")
class SimpleCache(Generic[_T, _U]):
    """
    Very simple cache that discards the oldest item when the cache size is
    exceeded.

    :param maxsize: Maximum size of the cache. (Don't make it too big.)
    """

    def __init__(self, maxsize: int = 8) -> None:
        assert maxsize > 0

        # Cached values, plus an insertion-ordered record of their keys so we
        # can find the oldest entry when the cache overflows.
        self._data: dict[_T, _U] = {}
        self._keys: deque[_T] = deque()
        self.maxsize: int = maxsize

    def get(self, key: _T, getter_func: Callable[[], _U]) -> _U:
        """
        Get object from the cache.
        If not found, call `getter_func` to resolve it, and put that on the top
        of the cache instead.
        """
        # Fast path: the key was cached before.
        if key in self._data:
            return self._data[key]

        # Cache miss: compute the value and remember it.
        value = getter_func()
        self._data[key] = value
        self._keys.append(key)

        # Evict the oldest entry once the size limit is exceeded. (The key may
        # already have been dropped from the mapping, hence the default.)
        if len(self._data) > self.maxsize:
            oldest = self._keys.popleft()
            self._data.pop(oldest, None)

        return value

    def clear(self) -> None:
        "Clear cache."
        self._data.clear()
        self._keys.clear()
62_K = TypeVar("_K", bound=tuple[Hashable, ...])
63_V = TypeVar("_V")
class FastDictCache(dict[_K, _V]):
    """
    Fast, lightweight cache which keeps at most `size` items.
    It will discard the oldest items in the cache first.

    The cache is a dictionary, which doesn't keep track of access counts.
    It is perfect to cache little immutable objects which are not expensive to
    create, but where a dictionary lookup is still much faster than an object
    instantiation.

    :param get_value: Callable that's called in case of a missing key.
    """

    # NOTE: This cache is used to cache `prompt_toolkit.layout.screen.Char` and
    #       `prompt_toolkit.Document`. Make sure to keep this really lightweight.
    #       Accessing the cache should stay faster than instantiating new
    #       objects.
    #       (Dictionary lookups are really fast.)
    #       SimpleCache is still required for cases where the cache key is not
    #       the same as the arguments given to the function that creates the
    #       value.)
    def __init__(self, get_value: Callable[..., _V], size: int = 1000000) -> None:
        assert size > 0

        # Keys in insertion order, so the oldest one can be dropped first.
        self._keys: deque[_K] = deque()
        self.get_value = get_value
        self.size = size

    def __missing__(self, key: _K) -> _V:
        # Drop the oldest entry first whenever the size limit is exceeded.
        # (It may already have been deleted, hence the membership check.)
        if len(self) > self.size:
            oldest = self._keys.popleft()
            if oldest in self:
                del self[oldest]

        # The key tuple doubles as the argument list for `get_value`.
        value = self.get_value(*key)
        self[key] = value
        self._keys.append(key)
        return value
107_F = TypeVar("_F", bound=Callable[..., object])
def memoized(maxsize: int = 1024) -> Callable[[_F], _F]:
    """
    Memoization decorator for immutable classes and pure functions.
    """

    def decorator(func: _F) -> _F:
        # One bounded cache per decorated callable.
        results: SimpleCache[Hashable, Any] = SimpleCache(maxsize=maxsize)

        @wraps(func)
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            # Sort keyword arguments so that call-site ordering doesn't
            # produce distinct cache keys for the same call.
            cache_key = (args, tuple(sorted(kwargs.items())))
            return results.get(cache_key, lambda: func(*args, **kwargs))

        return cast(_F, wrapper)

    return decorator