Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/retrying.py: 58%
155 statements
« prev ^ index » next coverage.py v7.3.2, created at 2023-12-08 06:51 +0000
« prev ^ index » next coverage.py v7.3.2, created at 2023-12-08 06:51 +0000
1# Copyright 2013-2014 Ray Holder
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
15import random
16import six
17import sys
18import time
19import traceback
# sys.maxint / 2, since Python 3.2 doesn't have a sys.maxint...
# Upper bound, in milliseconds, used to cap the incrementing and
# exponential wait strategies below.
MAX_WAIT = 1073741823
26def _retry_if_exception_of_type(retryable_types):
27 def _retry_if_exception_these_types(exception):
28 return isinstance(exception, retryable_types)
30 return _retry_if_exception_these_types
def retry(*dargs, **dkw):
    """
    Decorator function that instantiates the Retrying object
    @param *dargs: positional arguments passed to Retrying object
    @param **dkw: keyword arguments passed to the Retrying object
    """
    # Bare usage (@retry with no parentheses): the sole positional
    # argument is the function being decorated, so wrap it with a
    # default-configured Retrying instance.
    if len(dargs) == 1 and callable(dargs[0]):
        target = dargs[0]

        @six.wraps(target)
        def wrapped_f(*args, **kw):
            return Retrying().call(target, *args, **kw)

        return wrapped_f

    # Parameterized usage (@retry(...)): return a decorator that forwards
    # the supplied arguments to Retrying at each call.
    def wrap(f):
        @six.wraps(f)
        def wrapped_f(*args, **kw):
            return Retrying(*dargs, **dkw).call(f, *args, **kw)

        return wrapped_f

    return wrap
class Retrying(object):
    """
    Core retry engine.

    Decides when to stop retrying (``self.stop``), how long to sleep
    between attempts (``self.wait``), and which exceptions/results should
    trigger another attempt.  All durations are expressed in milliseconds.
    """

    def __init__(
        self,
        stop=None,
        wait=None,
        stop_max_attempt_number=None,
        stop_max_delay=None,
        wait_fixed=None,
        wait_random_min=None,
        wait_random_max=None,
        wait_incrementing_start=None,
        wait_incrementing_increment=None,
        wait_incrementing_max=None,
        wait_exponential_multiplier=None,
        wait_exponential_max=None,
        retry_on_exception=None,
        retry_on_result=None,
        wrap_exception=False,
        stop_func=None,
        wait_func=None,
        wait_jitter_max=None,
        before_attempts=None,
        after_attempts=None,
    ):
        # Defaults are applied here rather than in the signature so the
        # code below can still distinguish "explicitly configured" (not
        # None) from "defaulted" when choosing stop/wait behaviors.
        self._stop_max_attempt_number = (
            5 if stop_max_attempt_number is None else stop_max_attempt_number
        )
        # NOTE: default max delay is 100 ms.
        self._stop_max_delay = 100 if stop_max_delay is None else stop_max_delay
        self._wait_fixed = 1000 if wait_fixed is None else wait_fixed
        self._wait_random_min = 0 if wait_random_min is None else wait_random_min
        self._wait_random_max = 1000 if wait_random_max is None else wait_random_max
        self._wait_incrementing_start = (
            0 if wait_incrementing_start is None else wait_incrementing_start
        )
        self._wait_incrementing_increment = (
            100 if wait_incrementing_increment is None else wait_incrementing_increment
        )
        self._wait_exponential_multiplier = (
            1 if wait_exponential_multiplier is None else wait_exponential_multiplier
        )
        self._wait_exponential_max = (
            MAX_WAIT if wait_exponential_max is None else wait_exponential_max
        )
        self._wait_incrementing_max = (
            MAX_WAIT if wait_incrementing_max is None else wait_incrementing_max
        )
        self._wait_jitter_max = 0 if wait_jitter_max is None else wait_jitter_max
        # Optional callbacks invoked with the attempt number before/after
        # each attempt (after_attempts only fires when retrying continues).
        self._before_attempts = before_attempts
        self._after_attempts = after_attempts

        # TODO add chaining of stop behaviors
        # stop behavior: collect one stop predicate per explicitly
        # configured stop_* option; the composite stops when ANY fires.
        stop_funcs = []
        if stop_max_attempt_number is not None:
            stop_funcs.append(self.stop_after_attempt)

        if stop_max_delay is not None:
            stop_funcs.append(self.stop_after_delay)

        if stop_func is not None:
            # An explicit callable takes precedence over everything else.
            self.stop = stop_func

        elif stop is None:
            self.stop = lambda attempts, delay: any(
                f(attempts, delay) for f in stop_funcs
            )

        else:
            # `stop` is the *name* of one of this class's stop methods,
            # e.g. "stop_after_attempt".
            self.stop = getattr(self, stop)

        # TODO add chaining of wait behaviors
        # wait behavior: collect one sleep-duration function per
        # explicitly configured wait_* option; the composite sleeps for
        # the MAX of all candidates (the leading lambda makes the
        # default wait 0 when nothing is configured).
        wait_funcs = [lambda *args, **kwargs: 0]
        if wait_fixed is not None:
            wait_funcs.append(self.fixed_sleep)

        if wait_random_min is not None or wait_random_max is not None:
            wait_funcs.append(self.random_sleep)

        if (
            wait_incrementing_start is not None
            or wait_incrementing_increment is not None
        ):
            wait_funcs.append(self.incrementing_sleep)

        if wait_exponential_multiplier is not None or wait_exponential_max is not None:
            wait_funcs.append(self.exponential_sleep)

        if wait_func is not None:
            # An explicit callable takes precedence over everything else.
            self.wait = wait_func

        elif wait is None:
            self.wait = lambda attempts, delay: max(
                f(attempts, delay) for f in wait_funcs
            )

        else:
            # `wait` is the *name* of one of this class's sleep methods,
            # e.g. "exponential_sleep".
            self.wait = getattr(self, wait)

        # retry on exception filter
        if retry_on_exception is None:
            # With no filter configured, every exception triggers a retry.
            self._retry_on_exception = self.always_reject
        else:
            # this allows for providing a tuple of exception types that
            # should be allowed to retry on, and avoids having to create
            # a callback that does the same thing
            if isinstance(retry_on_exception, (tuple)):
                retry_on_exception = _retry_if_exception_of_type(retry_on_exception)
            self._retry_on_exception = retry_on_exception

        # retry on result filter
        if retry_on_result is None:
            # With no filter configured, no return value triggers a retry.
            self._retry_on_result = self.never_reject
        else:
            self._retry_on_result = retry_on_result

        # When true, exceptions raised after giving up are wrapped in
        # RetryError instead of being re-raised directly.
        self._wrap_exception = wrap_exception

    def stop_after_attempt(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Stop after the previous attempt >= stop_max_attempt_number."""
        return previous_attempt_number >= self._stop_max_attempt_number

    def stop_after_delay(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Stop after the time from the first attempt >= stop_max_delay."""
        return delay_since_first_attempt_ms >= self._stop_max_delay

    @staticmethod
    def no_sleep(previous_attempt_number, delay_since_first_attempt_ms):
        """Don't sleep at all before retrying."""
        return 0

    def fixed_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Sleep a fixed amount of time between each retry."""
        return self._wait_fixed

    def random_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Sleep a random amount of time between wait_random_min and wait_random_max"""
        return random.randint(self._wait_random_min, self._wait_random_max)

    def incrementing_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
        """
        Sleep an incremental amount of time after each attempt, starting at
        wait_incrementing_start and incrementing by wait_incrementing_increment
        """
        result = self._wait_incrementing_start + (
            self._wait_incrementing_increment * (previous_attempt_number - 1)
        )
        # Clamp to [0, wait_incrementing_max]; a negative increment could
        # otherwise drive the result below zero.
        if result > self._wait_incrementing_max:
            result = self._wait_incrementing_max
        if result < 0:
            result = 0
        return result

    def exponential_sleep(self, previous_attempt_number, delay_since_first_attempt_ms):
        """Sleep 2**attempt * multiplier ms, clamped to [0, wait_exponential_max]."""
        exp = 2**previous_attempt_number
        result = self._wait_exponential_multiplier * exp
        if result > self._wait_exponential_max:
            result = self._wait_exponential_max
        if result < 0:
            result = 0
        return result

    @staticmethod
    def never_reject(result):
        """Result filter that never triggers a retry."""
        return False

    @staticmethod
    def always_reject(result):
        """Exception filter that always triggers a retry."""
        return True

    def should_reject(self, attempt):
        """Return True when *attempt* should be retried, per the configured
        exception/result filters."""
        reject = False
        if attempt.has_exception:
            # attempt.value is the sys.exc_info() triple; pass the
            # exception instance itself to the filter.
            reject |= self._retry_on_exception(attempt.value[1])
        else:
            reject |= self._retry_on_result(attempt.value)

        return reject

    def call(self, fn, *args, **kwargs):
        """Call *fn*, retrying per this object's configuration.

        Returns fn's value on a non-rejected attempt; when the stop
        condition fires first, re-raises the last exception (or raises
        RetryError when wrap_exception is set or the last attempt
        produced a rejected result).
        """
        start_time = int(round(time.time() * 1000))
        attempt_number = 1
        while True:
            if self._before_attempts:
                self._before_attempts(attempt_number)

            try:
                attempt = Attempt(fn(*args, **kwargs), attempt_number, False)
            except:
                # NOTE: bare except — captures the full exc_info triple for
                # every exception type so it can be re-raised later.
                tb = sys.exc_info()
                attempt = Attempt(tb, attempt_number, True)

            if not self.should_reject(attempt):
                # Success path: returns the value, or re-raises a
                # non-retryable exception via Attempt.get().
                return attempt.get(self._wrap_exception)

            if self._after_attempts:
                self._after_attempts(attempt_number)

            delay_since_first_attempt_ms = int(round(time.time() * 1000)) - start_time
            if self.stop(attempt_number, delay_since_first_attempt_ms):
                if not self._wrap_exception and attempt.has_exception:
                    # get() on an attempt with an exception should cause it to be raised, but raise just in case
                    raise attempt.get()
                else:
                    raise RetryError(attempt)
            else:
                sleep = self.wait(attempt_number, delay_since_first_attempt_ms)
                if self._wait_jitter_max:
                    # Add up to wait_jitter_max ms of random jitter.
                    jitter = random.random() * self._wait_jitter_max
                    sleep = sleep + max(0, jitter)
                # sleep is in milliseconds; time.sleep takes seconds.
                time.sleep(sleep / 1000.0)

            attempt_number += 1
class Attempt(object):
    """
    An Attempt encapsulates a call to a target function that may end as a
    normal return value from the function or an Exception depending on what
    occurred during the execution.
    """

    def __init__(self, value, attempt_number, has_exception):
        # When has_exception is true, value holds the sys.exc_info()
        # triple; otherwise it is the function's return value.
        self.value = value
        self.attempt_number = attempt_number
        self.has_exception = has_exception

    def get(self, wrap_exception=False):
        """
        Return the return value of this Attempt instance or raise an Exception.
        If wrap_exception is true, this Attempt is wrapped inside of a
        RetryError before being raised.
        """
        if not self.has_exception:
            return self.value
        if wrap_exception:
            raise RetryError(self)
        # Re-raise the captured exception with its original traceback.
        six.reraise(self.value[0], self.value[1], self.value[2])

    def __repr__(self):
        if not self.has_exception:
            return "Attempts: {0}, Value: {1}".format(self.attempt_number, self.value)
        formatted_tb = "".join(traceback.format_tb(self.value[2]))
        return "Attempts: {0}, Error:\n{1}".format(self.attempt_number, formatted_tb)
class RetryError(Exception):
    """
    A RetryError encapsulates the last Attempt instance right before giving up.
    """

    def __init__(self, last_attempt):
        # The final Attempt made before the stop condition fired.
        self.last_attempt = last_attempt

    def __str__(self):
        return "RetryError[{0}]".format(self.last_attempt)