# Coverage for /pythoncovmergedfiles/medio/medio/src/aiohttp/aiohttp/web_fileresponse.py: 17%
# 146 statements
# « prev ^ index » next coverage.py v7.4.0, created at 2024-01-26 06:16 +0000
# « prev ^ index » next coverage.py v7.4.0, created at 2024-01-26 06:16 +0000
1import asyncio
2import mimetypes
3import os
4import pathlib
5from typing import (
6 IO,
7 TYPE_CHECKING,
8 Any,
9 Awaitable,
10 Callable,
11 Final,
12 Optional,
13 Tuple,
14 cast,
15)
17from . import hdrs
18from .abc import AbstractStreamWriter
19from .helpers import ETAG_ANY, ETag, must_be_empty_body
20from .typedefs import LooseHeaders, PathLike
21from .web_exceptions import (
22 HTTPNotModified,
23 HTTPPartialContent,
24 HTTPPreconditionFailed,
25 HTTPRequestRangeNotSatisfiable,
26)
27from .web_response import StreamResponse
29__all__ = ("FileResponse",)
31if TYPE_CHECKING:
32 from .web_request import BaseRequest
35_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
38NOSENDFILE: Final[bool] = bool(os.environ.get("AIOHTTP_NOSENDFILE"))
41class FileResponse(StreamResponse):
42 """A response object can be used to send files."""
44 def __init__(
45 self,
46 path: PathLike,
47 chunk_size: int = 256 * 1024,
48 status: int = 200,
49 reason: Optional[str] = None,
50 headers: Optional[LooseHeaders] = None,
51 ) -> None:
52 super().__init__(status=status, reason=reason, headers=headers)
54 self._path = pathlib.Path(path)
55 self._chunk_size = chunk_size
57 async def _sendfile_fallback(
58 self, writer: AbstractStreamWriter, fobj: IO[Any], offset: int, count: int
59 ) -> AbstractStreamWriter:
60 # To keep memory usage low,fobj is transferred in chunks
61 # controlled by the constructor's chunk_size argument.
63 chunk_size = self._chunk_size
64 loop = asyncio.get_event_loop()
66 await loop.run_in_executor(None, fobj.seek, offset)
68 chunk = await loop.run_in_executor(None, fobj.read, chunk_size)
69 while chunk:
70 await writer.write(chunk)
71 count = count - chunk_size
72 if count <= 0:
73 break
74 chunk = await loop.run_in_executor(None, fobj.read, min(chunk_size, count))
76 await writer.drain()
77 return writer
79 async def _sendfile(
80 self, request: "BaseRequest", fobj: IO[Any], offset: int, count: int
81 ) -> AbstractStreamWriter:
82 writer = await super().prepare(request)
83 assert writer is not None
85 if NOSENDFILE or self.compression:
86 return await self._sendfile_fallback(writer, fobj, offset, count)
88 loop = request._loop
89 transport = request.transport
90 assert transport is not None
92 try:
93 await loop.sendfile(transport, fobj, offset, count)
94 except NotImplementedError:
95 return await self._sendfile_fallback(writer, fobj, offset, count)
97 await super().write_eof()
98 return writer
100 @staticmethod
101 def _strong_etag_match(etag_value: str, etags: Tuple[ETag, ...]) -> bool:
102 if len(etags) == 1 and etags[0].value == ETAG_ANY:
103 return True
104 return any(etag.value == etag_value for etag in etags if not etag.is_weak)
106 async def _not_modified(
107 self, request: "BaseRequest", etag_value: str, last_modified: float
108 ) -> Optional[AbstractStreamWriter]:
109 self.set_status(HTTPNotModified.status_code)
110 self._length_check = False
111 self.etag = etag_value # type: ignore[assignment]
112 self.last_modified = last_modified # type: ignore[assignment]
113 # Delete any Content-Length headers provided by user. HTTP 304
114 # should always have empty response body
115 return await super().prepare(request)
117 async def _precondition_failed(
118 self, request: "BaseRequest"
119 ) -> Optional[AbstractStreamWriter]:
120 self.set_status(HTTPPreconditionFailed.status_code)
121 self.content_length = 0
122 return await super().prepare(request)
124 def _get_file_path_stat_and_gzip(
125 self, check_for_gzipped_file: bool
126 ) -> Tuple[pathlib.Path, os.stat_result, bool]:
127 """Return the file path, stat result, and gzip status.
129 This method should be called from a thread executor
130 since it calls os.stat which may block.
131 """
132 filepath = self._path
133 if check_for_gzipped_file:
134 gzip_path = filepath.with_name(filepath.name + ".gz")
135 try:
136 return gzip_path, gzip_path.stat(), True
137 except OSError:
138 # Fall through and try the non-gzipped file
139 pass
141 return filepath, filepath.stat(), False
143 async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]:
144 loop = asyncio.get_event_loop()
145 check_for_gzipped_file = "gzip" in request.headers.get(hdrs.ACCEPT_ENCODING, "")
146 filepath, st, gzip = await loop.run_in_executor(
147 None, self._get_file_path_stat_and_gzip, check_for_gzipped_file
148 )
150 etag_value = f"{st.st_mtime_ns:x}-{st.st_size:x}"
151 last_modified = st.st_mtime
153 # https://tools.ietf.org/html/rfc7232#section-6
154 ifmatch = request.if_match
155 if ifmatch is not None and not self._strong_etag_match(etag_value, ifmatch):
156 return await self._precondition_failed(request)
158 unmodsince = request.if_unmodified_since
159 if (
160 unmodsince is not None
161 and ifmatch is None
162 and st.st_mtime > unmodsince.timestamp()
163 ):
164 return await self._precondition_failed(request)
166 ifnonematch = request.if_none_match
167 if ifnonematch is not None and self._strong_etag_match(etag_value, ifnonematch):
168 return await self._not_modified(request, etag_value, last_modified)
170 modsince = request.if_modified_since
171 if (
172 modsince is not None
173 and ifnonematch is None
174 and st.st_mtime <= modsince.timestamp()
175 ):
176 return await self._not_modified(request, etag_value, last_modified)
178 ct = None
179 if hdrs.CONTENT_TYPE not in self.headers:
180 ct, encoding = mimetypes.guess_type(str(filepath))
181 if not ct:
182 ct = "application/octet-stream"
183 else:
184 encoding = "gzip" if gzip else None
186 status = self._status
187 file_size = st.st_size
188 count = file_size
190 start = None
192 ifrange = request.if_range
193 if ifrange is None or st.st_mtime <= ifrange.timestamp():
194 # If-Range header check:
195 # condition = cached date >= last modification date
196 # return 206 if True else 200.
197 # if False:
198 # Range header would not be processed, return 200
199 # if True but Range header missing
200 # return 200
201 try:
202 rng = request.http_range
203 start = rng.start
204 end = rng.stop
205 except ValueError:
206 # https://tools.ietf.org/html/rfc7233:
207 # A server generating a 416 (Range Not Satisfiable) response to
208 # a byte-range request SHOULD send a Content-Range header field
209 # with an unsatisfied-range value.
210 # The complete-length in a 416 response indicates the current
211 # length of the selected representation.
212 #
213 # Will do the same below. Many servers ignore this and do not
214 # send a Content-Range header with HTTP 416
215 self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
216 self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
217 return await super().prepare(request)
219 # If a range request has been made, convert start, end slice
220 # notation into file pointer offset and count
221 if start is not None or end is not None:
222 if start < 0 and end is None: # return tail of file
223 start += file_size
224 if start < 0:
225 # if Range:bytes=-1000 in request header but file size
226 # is only 200, there would be trouble without this
227 start = 0
228 count = file_size - start
229 else:
230 # rfc7233:If the last-byte-pos value is
231 # absent, or if the value is greater than or equal to
232 # the current length of the representation data,
233 # the byte range is interpreted as the remainder
234 # of the representation (i.e., the server replaces the
235 # value of last-byte-pos with a value that is one less than
236 # the current length of the selected representation).
237 count = (
238 min(end if end is not None else file_size, file_size) - start
239 )
241 if start >= file_size:
242 # HTTP 416 should be returned in this case.
243 #
244 # According to https://tools.ietf.org/html/rfc7233:
245 # If a valid byte-range-set includes at least one
246 # byte-range-spec with a first-byte-pos that is less than
247 # the current length of the representation, or at least one
248 # suffix-byte-range-spec with a non-zero suffix-length,
249 # then the byte-range-set is satisfiable. Otherwise, the
250 # byte-range-set is unsatisfiable.
251 self.headers[hdrs.CONTENT_RANGE] = f"bytes */{file_size}"
252 self.set_status(HTTPRequestRangeNotSatisfiable.status_code)
253 return await super().prepare(request)
255 status = HTTPPartialContent.status_code
256 # Even though you are sending the whole file, you should still
257 # return a HTTP 206 for a Range request.
258 self.set_status(status)
260 if ct:
261 self.content_type = ct
262 if encoding:
263 self.headers[hdrs.CONTENT_ENCODING] = encoding
264 if gzip:
265 self.headers[hdrs.VARY] = hdrs.ACCEPT_ENCODING
266 # Disable compression if we are already sending
267 # a compressed file since we don't want to double
268 # compress.
269 self._compression = False
271 self.etag = etag_value # type: ignore[assignment]
272 self.last_modified = st.st_mtime # type: ignore[assignment]
273 self.content_length = count
275 self.headers[hdrs.ACCEPT_RANGES] = "bytes"
277 real_start = cast(int, start)
279 if status == HTTPPartialContent.status_code:
280 self.headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format(
281 real_start, real_start + count - 1, file_size
282 )
284 # If we are sending 0 bytes calling sendfile() will throw a ValueError
285 if count == 0 or must_be_empty_body(request.method, self.status):
286 return await super().prepare(request)
288 fobj = await loop.run_in_executor(None, filepath.open, "rb")
289 if start: # be aware that start could be None or int=0 here.
290 offset = start
291 else:
292 offset = 0
294 try:
295 return await self._sendfile(request, fobj, offset, count)
296 finally:
297 await asyncio.shield(loop.run_in_executor(None, fobj.close))