Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/retrying.py: 60%


# Copyright 2013-2014 Ray Holder
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import random
import sys
import time
import traceback
from functools import wraps

# sys.maxint / 2, since Python 3.2 doesn't have a sys.maxint...
MAX_WAIT = 1073741823


def _retry_if_exception_of_type(retryable_types):
    def _retry_if_exception_these_types(exception):
        return isinstance(exception, retryable_types)

    return _retry_if_exception_these_types
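
# Illustrative use of the predicate factory above (not part of the original module):
#   _retry_if_exception_of_type((IOError, ValueError))(ValueError("boom"))  -> True
#   _retry_if_exception_of_type((IOError, ValueError))(KeyError("nope"))    -> False
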

def retry(*dargs, **dkw):
    """
    Decorator function that instantiates the Retrying object
    @param *dargs: positional arguments passed to Retrying object
    @param **dkw: keyword arguments passed to the Retrying object
    """
    # support both @retry and @retry() as valid syntax
    if len(dargs) == 1 and callable(dargs[0]):

        def wrap_simple(f):
            @wraps(f)
            def wrapped_f(*args, **kw):
                return Retrying().call(f, *args, **kw)

            return wrapped_f

        return wrap_simple(dargs[0])

    else:

        def wrap(f):
            @wraps(f)
            def wrapped_f(*args, **kw):
                return Retrying(*dargs, **dkw).call(f, *args, **kw)

            return wrapped_f

        return wrap
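
# Illustrative usage of the decorator above (not part of the original module;
# the function names are hypothetical):
#
#   @retry
#   def refresh():
#       ...
#   # bare @retry retries on any exception, forever and with no wait, because
#   # no stop or wait condition is configured
#
#   @retry(stop_max_attempt_number=3, wait_fixed=200,
#          retry_on_exception=lambda e: isinstance(e, IOError),
#          retry_on_result=lambda r: r is None)
#   def fetch():
#       ...
#   # retried up to 3 attempts, 200 ms apart, while it raises IOError or returns None
#
# See the __main__ demo at the end of this file for a runnable sketch.
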

_default_logger = None
_configured_null_logger = False


def _pick_logger(logger=None):
    # Factored into a small helper so that the `global` statement only needs to
    # appear here, not in the large __init__ method.
    global _default_logger, _configured_null_logger

    if logger in (True, None):
        if _default_logger is None:
            _default_logger = logging.getLogger(__name__)
        # Only add the null handler once, not every time we get the logger
        if logger is None and not _configured_null_logger:
            _configured_null_logger = True
            _default_logger.addHandler(logging.NullHandler())
            _default_logger.propagate = False
        return _default_logger
    else:  # Not None (and not True) -> must have supplied a logger. Just use that.
        return logger
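
# Logger selection at a glance (descriptive note, not part of the original module):
#   logger=None              -> module logger, silenced via a NullHandler with propagate=False
#   logger=True              -> module logger, left to propagate to any configured handlers
#   logger=<Logger instance> -> used as-is
# The None and True cases share the single module-level logger, so one
# logger=None call silences it for later logger=True users as well.
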

class Retrying(object):
    def __init__(
        self,
        stop=None,
        wait=None,
        stop_max_attempt_number=None,
        stop_max_delay=None,
        wait_fixed=None,
        wait_random_min=None,
        wait_random_max=None,
        wait_incrementing_start=None,
        wait_incrementing_increment=None,
        wait_incrementing_max=None,
        wait_exponential_multiplier=None,
        wait_exponential_max=None,
        retry_on_exception=None,
        retry_on_result=None,
        wrap_exception=False,
        stop_func=None,
        wait_func=None,
        wait_jitter_max=None,
        before_attempts=None,
        after_attempts=None,
        logger=None,
    ):

        self._stop_max_attempt_number = (
            5 if stop_max_attempt_number is None else stop_max_attempt_number
        )
        self._stop_max_delay = 100 if stop_max_delay is None else stop_max_delay
        self._wait_fixed = 1000 if wait_fixed is None else wait_fixed
        self._wait_random_min = 0 if wait_random_min is None else wait_random_min
        self._wait_random_max = 1000 if wait_random_max is None else wait_random_max
        self._wait_incrementing_start = (
            0 if wait_incrementing_start is None else wait_incrementing_start
        )
        self._wait_incrementing_increment = (
            100 if wait_incrementing_increment is None else wait_incrementing_increment
        )
        self._wait_exponential_multiplier = (
            1 if wait_exponential_multiplier is None else wait_exponential_multiplier
        )
        self._wait_exponential_max = (
            MAX_WAIT if wait_exponential_max is None else wait_exponential_max
        )
        self._wait_incrementing_max = (
            MAX_WAIT if wait_incrementing_max is None else wait_incrementing_max
        )
        self._wait_jitter_max = 0 if wait_jitter_max is None else wait_jitter_max
        self._before_attempts = before_attempts
        self._after_attempts = after_attempts

        self._logger = _pick_logger(logger)

        # TODO add chaining of stop behaviors
        # stop behavior
        stop_funcs = []
        if stop_max_attempt_number is not None:
            stop_funcs.append(self.stop_after_attempt)

        if stop_max_delay is not None:
            stop_funcs.append(self.stop_after_delay)

        if stop_func is not None:
            self.stop = stop_func

        elif stop is None:
            self.stop = lambda attempts, delay: any(
                f(attempts, delay) for f in stop_funcs
            )

        else:
            self.stop = getattr(self, stop)

        # TODO add chaining of wait behaviors
        # wait behavior
        wait_funcs = [lambda *args, **kwargs: 0]
        if wait_fixed is not None:
            wait_funcs.append(self.fixed_sleep)

        if wait_random_min is not None or wait_random_max is not None:
            wait_funcs.append(self.random_sleep)

        if (
            wait_incrementing_start is not None
            or wait_incrementing_increment is not None
        ):
            wait_funcs.append(self.incrementing_sleep)

        if wait_exponential_multiplier is not None or wait_exponential_max is not None:
            wait_funcs.append(self.exponential_sleep)

        if wait_func is not None:
            self.wait = wait_func

        elif wait is None:
            self.wait = lambda attempts, delay: max(
                f(attempts, delay) for f in wait_funcs
            )

        else:
            self.wait = getattr(self, wait)

        # retry on exception filter
        if retry_on_exception is None:
            self._retry_on_exception = self.always_reject
        else:
            # this allows for providing a tuple of exception types that
            # should be allowed to retry on, and avoids having to create
            # a callback that does the same thing
            if isinstance(retry_on_exception, (tuple, Exception)):
                retry_on_exception = _retry_if_exception_of_type(retry_on_exception)
            self._retry_on_exception = retry_on_exception

        # retry on result filter
        if retry_on_result is None:
            self._retry_on_result = self.never_reject
        else:
            self._retry_on_result = retry_on_result

        self._wrap_exception = wrap_exception

    def stop_after_attempt(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Stop once the number of previous attempts is >= stop_max_attempt_number."""
        return previous_attempt_number >= self._stop_max_attempt_number

    def stop_after_delay(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Stop once the elapsed time since the first attempt is >= stop_max_delay."""
        return delay_since_first_attempt_ms >= self._stop_max_delay
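    # Note (not part of the original module): the stop/wait thresholds and the
    # values returned by the sleep methods below are all in milliseconds; call()
    # divides the chosen wait by 1000.0 before handing it to time.sleep().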

    @staticmethod
    def no_sleep(previous_attempt_number, delay_since_first_attempt_ms):
        """Don't sleep at all before retrying."""
        return 0

    def fixed_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Sleep a fixed amount of time between each retry."""
        return self._wait_fixed

    def random_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Sleep a random amount of time between wait_random_min and wait_random_max"""
        return random.randint(self._wait_random_min, self._wait_random_max)

    def incrementing_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
        """
        Sleep an incremental amount of time after each attempt, starting at
        wait_incrementing_start and incrementing by wait_incrementing_increment
        """
        result = self._wait_incrementing_start + (
            self._wait_incrementing_increment * (previous_attempt_number - 1)
        )
        if result > self._wait_incrementing_max:
            result = self._wait_incrementing_max
        if result < 0:
            result = 0
        return result
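    # Worked example (illustrative): with wait_incrementing_start=0 and
    # wait_incrementing_increment=100, incrementing_sleep() yields 0 ms before the
    # second attempt, 100 ms before the third, 200 ms before the fourth, and so on,
    # capped at wait_incrementing_max and floored at 0.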

    def exponential_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
        exp = 2**previous_attempt_number
        result = self._wait_exponential_multiplier * exp
        if result > self._wait_exponential_max:
            result = self._wait_exponential_max
        if result < 0:
            result = 0
        return result
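    # Worked example (illustrative): exponential_sleep() computes
    # wait_exponential_multiplier * 2 ** previous_attempt_number, so with the
    # multiplier at 1 the waits are 2 ms, 4 ms, 8 ms, ... after attempts 1, 2, 3,
    # ..., capped at wait_exponential_max. A multiplier of 1000 gives waits on the
    # order of seconds.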

    @staticmethod
    def never_reject(result):
        return False

    @staticmethod
    def always_reject(result):
        return True

    def should_reject(self, attempt):
        reject = False
        if attempt.has_exception:
            reject |= self._retry_on_exception(attempt.value[1])
        else:
            reject |= self._retry_on_result(attempt.value)

        return reject

    def call(self, fn, *args, **kwargs):
        start_time = int(round(time.time() * 1000))
        attempt_number = 1
        while True:
            if self._before_attempts:
                self._before_attempts(attempt_number)

            try:
                attempt = Attempt(fn(*args, **kwargs), attempt_number, False)
            except Exception:
                tb = sys.exc_info()
                attempt = Attempt(tb, attempt_number, True)

            if not self.should_reject(attempt):
                return attempt.get(self._wrap_exception)

            self._logger.warning(attempt)
            if self._after_attempts:
                self._after_attempts(attempt_number)

            delay_since_first_attempt_ms = int(round(time.time() * 1000)) - start_time
            if self.stop(attempt_number, delay_since_first_attempt_ms):
                if not self._wrap_exception and attempt.has_exception:
                    # get() on an attempt with an exception should cause it to be raised, but raise just in case
                    raise attempt.get()
                else:
                    raise RetryError(attempt)
            else:
                sleep = self.wait(attempt_number, delay_since_first_attempt_ms)
                if self._wait_jitter_max:
                    jitter = random.random() * self._wait_jitter_max
                    sleep = sleep + max(0, jitter)
                self._logger.info(f"Retrying in {sleep / 1000.0:.2f} seconds.")
                time.sleep(sleep / 1000.0)

            attempt_number += 1
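
# Illustrative direct use (not part of the original module; `fetch` and `url` are
# hypothetical names):
#   Retrying(stop_max_attempt_number=3, wait_fixed=100).call(fetch, url)
# behaves the same as decorating `fetch` with @retry(stop_max_attempt_number=3,
# wait_fixed=100) and then calling fetch(url).
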



class Attempt(object):
    """
    An Attempt encapsulates a call to a target function that may end as a
    normal return value from the function or an Exception depending on what
    occurred during the execution.
    """

    def __init__(self, value, attempt_number, has_exception):
        self.value = value
        self.attempt_number = attempt_number
        self.has_exception = has_exception

    def get(self, wrap_exception=False):
        """
        Return the return value of this Attempt instance or raise an Exception.
        If wrap_exception is true, this Attempt is wrapped inside of a
        RetryError before being raised.
        """
        if self.has_exception:
            if wrap_exception:
                raise RetryError(self)
            else:
                exc_type, exc, tb = self.value
                raise exc.with_traceback(tb)
        else:
            return self.value

    def __repr__(self):
        if self.has_exception:
            return f"Attempts: {self.attempt_number}, Error:\n{''.join(traceback.format_tb(self.value[2]))}"
        else:
            return f"Attempts: {self.attempt_number}, Value: {self.value}"



class RetryError(Exception):
    """
    A RetryError encapsulates the last Attempt instance right before giving up.
    """

    def __init__(self, last_attempt):
        self.last_attempt = last_attempt

    def __str__(self):
        return f"RetryError[{self.last_attempt}]"
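

# ---------------------------------------------------------------------------
# Illustrative demo (not part of the upstream module): a minimal, hedged sketch
# of how the decorator and RetryError are meant to be used, based on the code
# above. The names _calls, _flaky, and _always_fails are invented for this
# example only.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    _calls = {"n": 0}

    @retry(
        stop_max_attempt_number=5,
        wait_fixed=10,
        retry_on_exception=lambda e: isinstance(e, ValueError),
    )
    def _flaky():
        # Fails twice with a retryable ValueError, then succeeds.
        _calls["n"] += 1
        if _calls["n"] < 3:
            raise ValueError("transient failure")
        return "ok"

    print(_flaky())  # -> "ok" after two retries

    @retry(stop_max_attempt_number=2, wait_fixed=10, wrap_exception=True)
    def _always_fails():
        raise RuntimeError("permanent failure")

    try:
        _always_fails()
    except RetryError as err:
        # With wrap_exception=True the final Attempt is surfaced via RetryError
        # instead of re-raising the underlying RuntimeError.
        print(err)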