1# Copyright 2017 Google Inc.
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14
15"""Support for downloading media from Google APIs."""
16
17import urllib3.response # type: ignore
18import http
19
20from google.resumable_media import _download
21from google.resumable_media import common
22from google.resumable_media import _helpers
23from google.resumable_media.requests import _request_helpers
24
25
# Error template raised when the locally computed digest disagrees with the
# checksum advertised in the ``X-Goog-Hash`` response header.
# NOTE: it mixes auto-numbered ``{}`` fields (media URL, expected digest,
# actual digest) with the named ``{checksum_type}`` field; ``str.format``
# permits combining the two styles in one template.
_CHECKSUM_MISMATCH = """\
Checksum mismatch while downloading:

  {}

The X-Goog-Hash header indicated an {checksum_type} checksum of:

  {}

but the actual {checksum_type} checksum of the downloaded contents was:

  {}
"""

# Message used when rewinding the user-supplied stream fails while handling a
# gzip-transcoded download; the single ``{}`` field is the request URL.
_STREAM_SEEK_ERROR = """\
Incomplete download for:
{}
Error writing to stream while handling a gzip-compressed file download.
Please restart the download.
"""

# Diagnostic suffix appended to download error messages; fields are, in
# order: the ``X-Goog-Stored-Content-Length`` header value, the
# ``X-Goog-Stored-Content-Encoding`` header value, and the number of bytes
# this client actually read.
_RESPONSE_HEADERS_INFO = """\

The X-Goog-Stored-Content-Length is {}. The X-Goog-Stored-Content-Encoding is {}.

The download request read {} bytes of data.
If the download was incomplete, please check the network connection and restart the download.
"""
54
55
class Download(_request_helpers.RequestsMixin, _download.Download):
    """Helper to manage downloading a resource from a Google API.

    "Slices" of the resource can be retrieved by specifying a range
    with ``start`` and / or ``end``. However, in typical usage, neither
    ``start`` nor ``end`` is expected to be provided.

    Args:
        media_url (str): The URL containing the media to be downloaded.
        stream (IO[bytes]): A write-able stream (i.e. file-like object) that
            the downloaded resource can be written to.
        start (int): The first byte in a range to be downloaded. If not
            provided, but ``end`` is provided, will download from the
            beginning to ``end`` of the media.
        end (int): The last byte in a range to be downloaded. If not
            provided, but ``start`` is provided, will download from the
            ``start`` to the end of the media.
        headers (Optional[Mapping[str, str]]): Extra headers that should
            be sent with the request, e.g. headers for encrypted data.
        checksum Optional([str]): The type of checksum to compute to verify
            the integrity of the object. The response headers must contain
            a checksum of the requested type. If the headers lack an
            appropriate checksum (for instance in the case of transcoded or
            ranged downloads where the remote service does not know the
            correct checksum) an INFO-level log will be emitted. Supported
            values are "md5", "crc32c" and None. The default is "md5".

    Attributes:
        media_url (str): The URL containing the media to be downloaded.
        start (Optional[int]): The first byte in a range to be downloaded.
        end (Optional[int]): The last byte in a range to be downloaded.
    """

    def _write_to_stream(self, response):
        """Write response body to a write-able stream.

        .. note:

            This method assumes that the ``_stream`` attribute is set on the
            current download.

        Args:
            response (~requests.Response): The HTTP response object.

        Raises:
            ~google.resumable_media.common.DataCorruption: If the download's
                checksum doesn't agree with server-computed checksum.
        """

        # Retrieve the expected checksum only once for the download request,
        # then compute and validate the checksum when the full download completes.
        # Retried requests are range requests, and there's no way to detect
        # data corruption for that byte range alone.
        if self._expected_checksum is None and self._checksum_object is None:
            # `_get_expected_checksum()` may return None even if a checksum was
            # requested, in which case it will emit an info log _MISSING_CHECKSUM.
            # If an invalid checksum type is specified, this will raise ValueError.
            expected_checksum, checksum_object = _helpers._get_expected_checksum(
                response, self._get_headers, self.media_url, checksum_type=self.checksum
            )
            self._expected_checksum = expected_checksum
            self._checksum_object = checksum_object
        else:
            # A retry is in progress: keep accumulating into the checksum
            # state carried over from the previous attempt(s).
            expected_checksum = self._expected_checksum
            checksum_object = self._checksum_object

        # ``with response`` ensures the underlying connection is released
        # back to the pool even if writing to ``self._stream`` raises.
        with response:
            # NOTE: In order to handle compressed streams gracefully, we try
            # to insert our checksum object into the decompression stream. If
            # the stream is indeed compressed, this will delegate the checksum
            # object to the decoder and return a _DoNothingHash here.
            local_checksum_object = _add_decoder(response.raw, checksum_object)
            body_iter = response.iter_content(
                chunk_size=_request_helpers._SINGLE_GET_CHUNK_SIZE, decode_unicode=False
            )
            for chunk in body_iter:
                self._stream.write(chunk)
                # Track progress so an interrupted download can resume with a
                # range request starting at this offset (see ``consume``).
                self._bytes_downloaded += len(chunk)
                local_checksum_object.update(chunk)

        # Don't validate the checksum for partial responses.
        if (
            expected_checksum is not None
            and response.status_code != http.client.PARTIAL_CONTENT
        ):
            actual_checksum = _helpers.prepare_checksum_digest(checksum_object.digest())

            if actual_checksum != expected_checksum:
                headers = self._get_headers(response)
                x_goog_encoding = headers.get("x-goog-stored-content-encoding")
                x_goog_length = headers.get("x-goog-stored-content-length")
                content_length_msg = _RESPONSE_HEADERS_INFO.format(
                    x_goog_length, x_goog_encoding, self._bytes_downloaded
                )
                if (
                    x_goog_length
                    and self._bytes_downloaded < int(x_goog_length)
                    and x_goog_encoding != "gzip"
                ):
                    # The library will attempt to trigger a retry by raising a ConnectionError, if
                    # (a) bytes_downloaded is less than response header x-goog-stored-content-length, and
                    # (b) the object is not gzip-compressed when stored in Cloud Storage.
                    raise ConnectionError(content_length_msg)
                else:
                    msg = _CHECKSUM_MISMATCH.format(
                        self.media_url,
                        expected_checksum,
                        actual_checksum,
                        checksum_type=self.checksum.upper(),
                    )
                    msg += content_length_msg
                    raise common.DataCorruption(response, msg)

    def consume(
        self,
        transport,
        timeout=(
            _request_helpers._DEFAULT_CONNECT_TIMEOUT,
            _request_helpers._DEFAULT_READ_TIMEOUT,
        ),
    ):
        """Consume the resource to be downloaded.

        If a ``stream`` is attached to this download, then the downloaded
        resource will be written to the stream.

        Args:
            transport (~requests.Session): A ``requests`` object which can
                make authenticated requests.
            timeout (Optional[Union[float, Tuple[float, float]]]):
                The number of seconds to wait for the server response.
                Depending on the retry strategy, a request may be repeated
                several times using the same timeout each time.

                Can also be passed as a tuple (connect_timeout, read_timeout).
                See :meth:`requests.Session.request` documentation for details.

        Returns:
            ~requests.Response: The HTTP response returned by ``transport``.

        Raises:
            ~google.resumable_media.common.DataCorruption: If the download's
                checksum doesn't agree with server-computed checksum.
            ValueError: If the current :class:`Download` has already
                finished.
        """
        method, _, payload, headers = self._prepare_request()
        # NOTE: We assume "payload is None" but pass it along anyway.
        request_kwargs = {
            "data": payload,
            "headers": headers,
            "timeout": timeout,
        }
        if self._stream is not None:
            # Stream the body so chunks can be written out incrementally
            # instead of being buffered whole in memory by ``requests``.
            request_kwargs["stream"] = True

        # Assign object generation if generation is specified in the media url.
        if self._object_generation is None:
            self._object_generation = _helpers._get_generation_from_url(self.media_url)

        # Wrap the request business logic in a function to be retried.
        def retriable_request():
            url = self.media_url

            # To restart an interrupted download, read from the offset of last byte
            # received using a range request, and set object generation query param.
            if self._bytes_downloaded > 0:
                _download.add_bytes_range(
                    (self.start or 0) + self._bytes_downloaded, self.end, self._headers
                )
                request_kwargs["headers"] = self._headers

            # Set object generation query param to ensure the same object content is requested.
            if (
                self._object_generation is not None
                and _helpers._get_generation_from_url(self.media_url) is None
            ):
                query_param = {"generation": self._object_generation}
                url = _helpers.add_query_parameters(self.media_url, query_param)

            result = transport.request(method, url, **request_kwargs)

            # If a generation hasn't been specified, and this is the first response we get, let's record the
            # generation. In future requests we'll specify the generation query param to avoid data races.
            if self._object_generation is None:
                self._object_generation = _helpers._parse_generation_header(
                    result, self._get_headers
                )

            self._process_response(result)

            # With decompressive transcoding, GCS serves back the whole file regardless of the range request,
            # thus we reset the stream position to the start of the stream.
            # See: https://cloud.google.com/storage/docs/transcoding#range
            if self._stream is not None:
                if _helpers._is_decompressive_transcoding(result, self._get_headers):
                    try:
                        self._stream.seek(0)
                    except Exception as exc:
                        msg = _STREAM_SEEK_ERROR.format(url)
                        raise Exception(msg) from exc
                    self._bytes_downloaded = 0

                self._write_to_stream(result)

            return result

        return _request_helpers.wait_and_retry(
            retriable_request, self._get_status_code, self._retry_strategy
        )
266
267
class RawDownload(_request_helpers.RawRequestsMixin, _download.Download):
    """Helper to manage downloading a raw resource from a Google API.

    "Slices" of the resource can be retrieved by specifying a range
    with ``start`` and / or ``end``. However, in typical usage, neither
    ``start`` nor ``end`` is expected to be provided.

    Args:
        media_url (str): The URL containing the media to be downloaded.
        stream (IO[bytes]): A write-able stream (i.e. file-like object) that
            the downloaded resource can be written to.
        start (int): The first byte in a range to be downloaded. If not
            provided, but ``end`` is provided, will download from the
            beginning to ``end`` of the media.
        end (int): The last byte in a range to be downloaded. If not
            provided, but ``start`` is provided, will download from the
            ``start`` to the end of the media.
        headers (Optional[Mapping[str, str]]): Extra headers that should
            be sent with the request, e.g. headers for encrypted data.
        checksum Optional([str]): The type of checksum to compute to verify
            the integrity of the object. The response headers must contain
            a checksum of the requested type. If the headers lack an
            appropriate checksum (for instance in the case of transcoded or
            ranged downloads where the remote service does not know the
            correct checksum) an INFO-level log will be emitted. Supported
            values are "md5", "crc32c" and None. The default is "md5".
    Attributes:
        media_url (str): The URL containing the media to be downloaded.
        start (Optional[int]): The first byte in a range to be downloaded.
        end (Optional[int]): The last byte in a range to be downloaded.
    """

    def _write_to_stream(self, response):
        """Write response body to a write-able stream.

        .. note:

            This method assumes that the ``_stream`` attribute is set on the
            current download.

        Args:
            response (~requests.Response): The HTTP response object.

        Raises:
            ~google.resumable_media.common.DataCorruption: If the download's
                checksum doesn't agree with server-computed checksum.
        """
        # Retrieve the expected checksum only once for the download request,
        # then compute and validate the checksum when the full download completes.
        # Retried requests are range requests, and there's no way to detect
        # data corruption for that byte range alone.
        if self._expected_checksum is None and self._checksum_object is None:
            # `_get_expected_checksum()` may return None even if a checksum was
            # requested, in which case it will emit an info log _MISSING_CHECKSUM.
            # If an invalid checksum type is specified, this will raise ValueError.
            expected_checksum, checksum_object = _helpers._get_expected_checksum(
                response, self._get_headers, self.media_url, checksum_type=self.checksum
            )
            self._expected_checksum = expected_checksum
            self._checksum_object = checksum_object
        else:
            # A retry is in progress: keep accumulating into the checksum
            # state carried over from the previous attempt(s).
            expected_checksum = self._expected_checksum
            checksum_object = self._checksum_object

        # ``with response`` ensures the underlying connection is released
        # back to the pool even if writing to ``self._stream`` raises.
        with response:
            # Read the body with ``decode_content=False`` so the bytes written
            # (and hashed) are the stored, un-decoded payload; no decoder
            # patching (as in ``Download._write_to_stream``) is needed here.
            body_iter = response.raw.stream(
                _request_helpers._SINGLE_GET_CHUNK_SIZE, decode_content=False
            )
            for chunk in body_iter:
                self._stream.write(chunk)
                # Track progress so an interrupted download can resume with a
                # range request starting at this offset (see ``consume``).
                self._bytes_downloaded += len(chunk)
                checksum_object.update(chunk)
            # NOTE(review): sets a private ``requests`` attribute, presumably
            # to mark the body as fully read since we drained ``response.raw``
            # directly — confirm against the pinned requests version.
            response._content_consumed = True

        # Don't validate the checksum for partial responses.
        if (
            expected_checksum is not None
            and response.status_code != http.client.PARTIAL_CONTENT
        ):
            actual_checksum = _helpers.prepare_checksum_digest(checksum_object.digest())

            if actual_checksum != expected_checksum:
                headers = self._get_headers(response)
                x_goog_encoding = headers.get("x-goog-stored-content-encoding")
                x_goog_length = headers.get("x-goog-stored-content-length")
                content_length_msg = _RESPONSE_HEADERS_INFO.format(
                    x_goog_length, x_goog_encoding, self._bytes_downloaded
                )
                if (
                    x_goog_length
                    and self._bytes_downloaded < int(x_goog_length)
                    and x_goog_encoding != "gzip"
                ):
                    # The library will attempt to trigger a retry by raising a ConnectionError, if
                    # (a) bytes_downloaded is less than response header x-goog-stored-content-length, and
                    # (b) the object is not gzip-compressed when stored in Cloud Storage.
                    raise ConnectionError(content_length_msg)
                else:
                    msg = _CHECKSUM_MISMATCH.format(
                        self.media_url,
                        expected_checksum,
                        actual_checksum,
                        checksum_type=self.checksum.upper(),
                    )
                    msg += content_length_msg
                    raise common.DataCorruption(response, msg)

    def consume(
        self,
        transport,
        timeout=(
            _request_helpers._DEFAULT_CONNECT_TIMEOUT,
            _request_helpers._DEFAULT_READ_TIMEOUT,
        ),
    ):
        """Consume the resource to be downloaded.

        If a ``stream`` is attached to this download, then the downloaded
        resource will be written to the stream.

        Args:
            transport (~requests.Session): A ``requests`` object which can
                make authenticated requests.
            timeout (Optional[Union[float, Tuple[float, float]]]):
                The number of seconds to wait for the server response.
                Depending on the retry strategy, a request may be repeated
                several times using the same timeout each time.

                Can also be passed as a tuple (connect_timeout, read_timeout).
                See :meth:`requests.Session.request` documentation for details.

        Returns:
            ~requests.Response: The HTTP response returned by ``transport``.

        Raises:
            ~google.resumable_media.common.DataCorruption: If the download's
                checksum doesn't agree with server-computed checksum.
            ValueError: If the current :class:`Download` has already
                finished.
        """
        method, _, payload, headers = self._prepare_request()
        # NOTE: We assume "payload is None" but pass it along anyway.
        # ``stream`` is always True here (unlike ``Download.consume``) because
        # the raw body must be read from ``response.raw`` without decoding.
        request_kwargs = {
            "data": payload,
            "headers": headers,
            "timeout": timeout,
            "stream": True,
        }

        # Assign object generation if generation is specified in the media url.
        if self._object_generation is None:
            self._object_generation = _helpers._get_generation_from_url(self.media_url)

        # Wrap the request business logic in a function to be retried.
        def retriable_request():
            url = self.media_url

            # To restart an interrupted download, read from the offset of last byte
            # received using a range request, and set object generation query param.
            if self._bytes_downloaded > 0:
                _download.add_bytes_range(
                    (self.start or 0) + self._bytes_downloaded, self.end, self._headers
                )
                request_kwargs["headers"] = self._headers

            # Set object generation query param to ensure the same object content is requested.
            if (
                self._object_generation is not None
                and _helpers._get_generation_from_url(self.media_url) is None
            ):
                query_param = {"generation": self._object_generation}
                url = _helpers.add_query_parameters(self.media_url, query_param)

            result = transport.request(method, url, **request_kwargs)

            # If a generation hasn't been specified, and this is the first response we get, let's record the
            # generation. In future requests we'll specify the generation query param to avoid data races.
            if self._object_generation is None:
                self._object_generation = _helpers._parse_generation_header(
                    result, self._get_headers
                )

            self._process_response(result)

            # With decompressive transcoding, GCS serves back the whole file regardless of the range request,
            # thus we reset the stream position to the start of the stream.
            # See: https://cloud.google.com/storage/docs/transcoding#range
            if self._stream is not None:
                if _helpers._is_decompressive_transcoding(result, self._get_headers):
                    try:
                        self._stream.seek(0)
                    except Exception as exc:
                        msg = _STREAM_SEEK_ERROR.format(url)
                        raise Exception(msg) from exc
                    self._bytes_downloaded = 0

                self._write_to_stream(result)

            return result

        return _request_helpers.wait_and_retry(
            retriable_request, self._get_status_code, self._retry_strategy
        )
471
472
class ChunkedDownload(_request_helpers.RequestsMixin, _download.ChunkedDownload):
    """Download a resource from a Google API one chunk at a time.

    Each call to :meth:`consume_next_chunk` issues a single range request
    for the next ``chunk_size`` bytes and appends them to ``stream``.

    Args:
        media_url (str): The URL containing the media to be downloaded.
        chunk_size (int): The number of bytes to be retrieved in each
            request.
        stream (IO[bytes]): A write-able stream (i.e. file-like object) that
            will be used to concatenate chunks of the resource as they are
            downloaded.
        start (int): The first byte in a range to be downloaded. If not
            provided, defaults to ``0``.
        end (int): The last byte in a range to be downloaded. If not
            provided, will download to the end of the media.
        headers (Optional[Mapping[str, str]]): Extra headers that should
            be sent with each request, e.g. headers for data encryption
            key headers.

    Attributes:
        media_url (str): The URL containing the media to be downloaded.
        start (Optional[int]): The first byte in a range to be downloaded.
        end (Optional[int]): The last byte in a range to be downloaded.
        chunk_size (int): The number of bytes to be retrieved in each request.

    Raises:
        ValueError: If ``start`` is negative.
    """

    def consume_next_chunk(
        self,
        transport,
        timeout=(
            _request_helpers._DEFAULT_CONNECT_TIMEOUT,
            _request_helpers._DEFAULT_READ_TIMEOUT,
        ),
    ):
        """Fetch the next chunk of the resource being downloaded.

        Args:
            transport (~requests.Session): A ``requests`` object which can
                make authenticated requests.
            timeout (Optional[Union[float, Tuple[float, float]]]):
                The number of seconds to wait for the server response.
                Depending on the retry strategy, a request may be repeated
                several times using the same timeout each time.

                Can also be passed as a tuple (connect_timeout, read_timeout).
                See :meth:`requests.Session.request` documentation for details.

        Returns:
            ~requests.Response: The HTTP response returned by ``transport``.

        Raises:
            ValueError: If the current download has finished.
        """
        method, url, payload, headers = self._prepare_request()

        def retriable_request():
            # NOTE: ``payload`` is expected to be None; it is forwarded
            # to the transport regardless.
            response = transport.request(
                method, url, data=payload, headers=headers, timeout=timeout
            )
            self._process_response(response)
            return response

        # Delegate retry handling (backoff, retriable status codes) to the
        # shared helper.
        return _request_helpers.wait_and_retry(
            retriable_request, self._get_status_code, self._retry_strategy
        )
546
547
class RawChunkedDownload(_request_helpers.RawRequestsMixin, _download.ChunkedDownload):
    """Download a raw resource from a Google API one chunk at a time.

    Each call to :meth:`consume_next_chunk` issues a single range request
    for the next ``chunk_size`` bytes and appends them to ``stream``.

    Args:
        media_url (str): The URL containing the media to be downloaded.
        chunk_size (int): The number of bytes to be retrieved in each
            request.
        stream (IO[bytes]): A write-able stream (i.e. file-like object) that
            will be used to concatenate chunks of the resource as they are
            downloaded.
        start (int): The first byte in a range to be downloaded. If not
            provided, defaults to ``0``.
        end (int): The last byte in a range to be downloaded. If not
            provided, will download to the end of the media.
        headers (Optional[Mapping[str, str]]): Extra headers that should
            be sent with each request, e.g. headers for data encryption
            key headers.

    Attributes:
        media_url (str): The URL containing the media to be downloaded.
        start (Optional[int]): The first byte in a range to be downloaded.
        end (Optional[int]): The last byte in a range to be downloaded.
        chunk_size (int): The number of bytes to be retrieved in each request.

    Raises:
        ValueError: If ``start`` is negative.
    """

    def consume_next_chunk(
        self,
        transport,
        timeout=(
            _request_helpers._DEFAULT_CONNECT_TIMEOUT,
            _request_helpers._DEFAULT_READ_TIMEOUT,
        ),
    ):
        """Fetch the next chunk of the resource being downloaded.

        Args:
            transport (~requests.Session): A ``requests`` object which can
                make authenticated requests.
            timeout (Optional[Union[float, Tuple[float, float]]]):
                The number of seconds to wait for the server response.
                Depending on the retry strategy, a request may be repeated
                several times using the same timeout each time.

                Can also be passed as a tuple (connect_timeout, read_timeout).
                See :meth:`requests.Session.request` documentation for details.

        Returns:
            ~requests.Response: The HTTP response returned by ``transport``.

        Raises:
            ValueError: If the current download has finished.
        """
        method, url, payload, headers = self._prepare_request()

        def retriable_request():
            # NOTE: ``payload`` is expected to be None; it is forwarded
            # to the transport regardless. ``stream=True`` keeps the raw
            # body un-decoded for this "raw" variant.
            response = transport.request(
                method,
                url,
                data=payload,
                headers=headers,
                stream=True,
                timeout=timeout,
            )
            self._process_response(response)
            return response

        # Delegate retry handling (backoff, retriable status codes) to the
        # shared helper.
        return _request_helpers.wait_and_retry(
            retriable_request, self._get_status_code, self._retry_strategy
        )
622
623
def _add_decoder(response_raw, checksum):
    """Patch the ``_decoder`` attribute of a ``urllib3`` response.

    Lets the checksum object observe the compressed bytes before they are
    decoded, so the digest matches the stored (encoded) object.

    Only the ``gzip`` and ``br`` content encodings are intercepted.

    Args:
        response_raw (urllib3.response.HTTPResponse): The raw response for
            an HTTP request.
        checksum (object):
            A checksum which will be updated with compressed bytes.

    Returns:
        object: A ``_DoNothingHash`` when the decoder was patched (the
        patched decoder now owns the real checksum, so the caller must not
        hash the decoded bytes), otherwise the original ``checksum``.
    """
    encoding = response_raw.headers.get("content-encoding", "").lower()
    if encoding == "gzip":
        response_raw._decoder = _GzipDecoder(checksum)
    elif encoding == "br" and _BrotliDecoder:  # type: ignore
        # ``_BrotliDecoder`` is None unless brotli is installed.
        response_raw._decoder = _BrotliDecoder(checksum)
    else:
        # Unhandled (or no) encoding: the caller keeps hashing directly.
        return checksum
    return _helpers._DoNothingHash()
653
654
class _GzipDecoder(urllib3.response.GzipDecoder):
    """Checksum-aware subclass of ``urllib3``'s ``gzip`` decoder.

    Every chunk of compressed input is fed to a checksum object before it
    is decompressed, so the digest reflects the compressed value.

    Args:
        checksum (object):
            A checksum which will be updated with compressed bytes.
    """

    def __init__(self, checksum):
        super().__init__()
        self._checksum = checksum

    def decompress(self, data):
        """Update the checksum with ``data``, then decompress it.

        Args:
            data (bytes): The compressed bytes to be decompressed.

        Returns:
            bytes: The decompressed bytes from ``data``.
        """
        self._checksum.update(data)
        decompressed = super().decompress(data)
        return decompressed
681
682
# urllib3 only exposes ``BrotliDecoder`` when the brotli package is
# installed, so this wrapper must be defined conditionally.
if hasattr(urllib3.response, "BrotliDecoder"):

    class _BrotliDecoder:
        """Checksum-aware proxy around ``urllib3``'s ``brotli`` decoder.

        Every chunk of compressed input is fed to a checksum object before
        it is decompressed, so the digest reflects the compressed value.

        ``urllib3`` creates ``BrotliDecoder.decompress`` dynamically, which
        makes subclassing impractical; instead this class holds a captive
        ``urllib3.response.BrotliDecoder`` instance and delegates to it.

        Args:
            checksum (object):
                A checksum which will be updated with compressed bytes.
        """

        def __init__(self, checksum):
            self._decoder = urllib3.response.BrotliDecoder()
            self._checksum = checksum

        def decompress(self, data):
            """Update the checksum with ``data``, then decompress it.

            Args:
                data (bytes): The compressed bytes to be decompressed.

            Returns:
                bytes: The decompressed bytes from ``data``.
            """
            self._checksum.update(data)
            return self._decoder.decompress(data)

        def flush(self):
            # Pure delegation; no bytes to hash here.
            return self._decoder.flush()

else:  # pragma: NO COVER
    _BrotliDecoder = None  # type: ignore # pragma: NO COVER