Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/cachelib/redis.py: 6%

81 statements  

coverage.py v7.0.1, created at 2022-12-25 06:11 +0000

import typing as _t

from cachelib.base import BaseCache
from cachelib.serializers import RedisSerializer


class RedisCache(BaseCache):
    """Uses the Redis key-value store as a cache backend.

    The first argument can be either a string denoting the address of the
    Redis server or an object resembling an instance of a redis.Redis class.

    Note: the Python Redis API already takes care of encoding unicode strings
    on the fly.

    :param host: address of the Redis server, or an object whose API is
        compatible with the official Python Redis client (redis-py).
    :param port: port number on which the Redis server listens for connections.
    :param password: password authentication for the Redis server.
    :param db: db (zero-based numeric index) on the Redis server to connect to.
    :param default_timeout: the default timeout that is used if no timeout is
        specified on :meth:`~BaseCache.set`. A timeout of 0 indicates that the
        cache never expires.
    :param key_prefix: a prefix that should be added to all keys.

    Any additional keyword arguments will be passed to ``redis.Redis``.
    """


    _read_client: _t.Any = None
    _write_client: _t.Any = None
    serializer = RedisSerializer()

    def __init__(
        self,
        host: _t.Any = "localhost",
        port: int = 6379,
        password: _t.Optional[str] = None,
        db: int = 0,
        default_timeout: int = 300,
        key_prefix: _t.Optional[str] = None,
        **kwargs: _t.Any
    ):
        BaseCache.__init__(self, default_timeout)
        if host is None:
            raise ValueError("RedisCache host parameter may not be None")
        if isinstance(host, str):
            try:
                import redis
            except ImportError as err:
                raise RuntimeError("no redis module found") from err
            if kwargs.get("decode_responses", None):
                raise ValueError("decode_responses is not supported by RedisCache.")
            self._write_client = self._read_client = redis.Redis(
                host=host, port=port, password=password, db=db, **kwargs
            )
        else:
            self._read_client = self._write_client = host
        self.key_prefix = key_prefix or ""

    def _normalize_timeout(self, timeout: _t.Optional[int]) -> int:
        """Normalize the timeout by falling back to the configured default
        when it is not defined (None), and by setting it to -1 when it is
        explicitly set to zero.

        :param timeout: timeout to normalize.
        """
        timeout = BaseCache._normalize_timeout(self, timeout)
        if timeout == 0:
            timeout = -1
        return timeout
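
    # For example, with the constructor default of default_timeout=300
    # (illustrative values, derived from the logic above):
    #   _normalize_timeout(None) -> 300   (fall back to the default)
    #   _normalize_timeout(0)    -> -1    (store without expiry)
    #   _normalize_timeout(42)   -> 42    (passed through unchanged)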


    def get(self, key: str) -> _t.Any:
        return self.serializer.loads(self._read_client.get(self.key_prefix + key))

    def get_many(self, *keys: str) -> _t.List[_t.Any]:
        if self.key_prefix:
            prefixed_keys = [self.key_prefix + key for key in keys]
        else:
            prefixed_keys = list(keys)
        return [
            self.serializer.loads(x) for x in self._read_client.mget(prefixed_keys)
        ]

    def set(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> _t.Any:
        timeout = self._normalize_timeout(timeout)
        dump = self.serializer.dumps(value)
        if timeout == -1:
            result = self._write_client.set(name=self.key_prefix + key, value=dump)
        else:
            result = self._write_client.setex(
                name=self.key_prefix + key, value=dump, time=timeout
            )
        return result

    def add(self, key: str, value: _t.Any, timeout: _t.Optional[int] = None) -> _t.Any:
        timeout = self._normalize_timeout(timeout)
        dump = self.serializer.dumps(value)
        created = self._write_client.setnx(name=self.key_prefix + key, value=dump)
        # handle case where timeout is explicitly set to zero
        if created and timeout != -1:
            self._write_client.expire(name=self.key_prefix + key, time=timeout)
        return created

    def set_many(
        self, mapping: _t.Dict[str, _t.Any], timeout: _t.Optional[int] = None
    ) -> _t.List[_t.Any]:
        timeout = self._normalize_timeout(timeout)
        # Use transaction=False to batch without calling redis MULTI
        # which is not supported by twemproxy
        pipe = self._write_client.pipeline(transaction=False)

        for key, value in mapping.items():
            dump = self.serializer.dumps(value)
            if timeout == -1:
                pipe.set(name=self.key_prefix + key, value=dump)
            else:
                pipe.setex(name=self.key_prefix + key, value=dump, time=timeout)
        results = pipe.execute()
        return [k for k, was_set in zip(mapping.keys(), results) if was_set]

    def delete(self, key: str) -> bool:
        return bool(self._write_client.delete(self.key_prefix + key))

    def delete_many(self, *keys: str) -> _t.List[_t.Any]:
        if not keys:
            return []
        if self.key_prefix:
            prefixed_keys = [self.key_prefix + key for key in keys]
        else:
            prefixed_keys = [k for k in keys]
        self._write_client.delete(*prefixed_keys)
        return [k for k in prefixed_keys if not self.has(k)]

    def has(self, key: str) -> bool:
        return bool(self._read_client.exists(self.key_prefix + key))

    def clear(self) -> bool:
        status = 0
        if self.key_prefix:
            keys = self._read_client.keys(self.key_prefix + "*")
            if keys:
                status = self._write_client.delete(*keys)
        else:
            status = self._write_client.flushdb()
        return bool(status)

    def inc(self, key: str, delta: int = 1) -> _t.Any:
        return self._write_client.incr(name=self.key_prefix + key, amount=delta)

    def dec(self, key: str, delta: int = 1) -> _t.Any:
        return self._write_client.incr(name=self.key_prefix + key, amount=-delta)
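

# Minimal usage sketch (illustrative, not part of the library source): assumes
# a Redis server listening on localhost:6379 and the redis-py package installed.
if __name__ == "__main__":
    cache = RedisCache(host="localhost", port=6379, db=0, key_prefix="demo:")
    cache.set("greeting", {"text": "hello"}, timeout=60)  # expires after 60 seconds
    print(cache.get("greeting"))                          # -> {'text': 'hello'}
    print(cache.add("greeting", "ignored"))               # False: the key already exists
    print(cache.set_many({"a": 1, "b": 2}, timeout=0))    # ['a', 'b'], stored without expiry
    cache.delete_many("a", "b")
    cache.clear()                                         # removes every key under "demo:"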