Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/urllib3/connectionpool.py: 20%

343 statements  

« prev     ^ index     » next       coverage.py v7.2.7, created at 2023-06-07 06:35 +0000

1from __future__ import annotations 

2 

3import errno 

4import logging 

5import queue 

6import sys 

7import typing 

8import warnings 

9import weakref 

10from socket import timeout as SocketTimeout 

11from types import TracebackType 

12 

13from ._base_connection import _TYPE_BODY 

14from ._request_methods import RequestMethods 

15from .connection import ( 

16 BaseSSLError, 

17 BrokenPipeError, 

18 DummyConnection, 

19 HTTPConnection, 

20 HTTPException, 

21 HTTPSConnection, 

22 ProxyConfig, 

23 _wrap_proxy_error, 

24) 

25from .connection import port_by_scheme as port_by_scheme 

26from .exceptions import ( 

27 ClosedPoolError, 

28 EmptyPoolError, 

29 FullPoolError, 

30 HostChangedError, 

31 InsecureRequestWarning, 

32 LocationValueError, 

33 MaxRetryError, 

34 NewConnectionError, 

35 ProtocolError, 

36 ProxyError, 

37 ReadTimeoutError, 

38 SSLError, 

39 TimeoutError, 

40) 

41from .response import BaseHTTPResponse 

42from .util.connection import is_connection_dropped 

43from .util.proxy import connection_requires_http_tunnel 

44from .util.request import _TYPE_BODY_POSITION, set_file_position 

45from .util.retry import Retry 

46from .util.ssl_match_hostname import CertificateError 

47from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_DEFAULT, Timeout 

48from .util.url import Url, _encode_target 

49from .util.url import _normalize_host as normalize_host 

50from .util.url import parse_url 

51from .util.util import to_str 

52 

53if typing.TYPE_CHECKING: 

54 import ssl 

55 

56 from typing_extensions import Literal 

57 

58 from ._base_connection import BaseHTTPConnection, BaseHTTPSConnection 

59 

# Module-level logger; consumers configure handlers/levels through the
# stdlib ``logging`` machinery.
log = logging.getLogger(__name__)

# Every accepted spelling of a timeout argument: a rich Timeout object, a
# plain number of seconds, the _DEFAULT_TIMEOUT sentinel, or None.
_TYPE_TIMEOUT = typing.Union[Timeout, float, _TYPE_DEFAULT, None]

# TypeVar so context-manager methods can return the concrete subclass type.
_SelfT = typing.TypeVar("_SelfT")

65 

66 

67# Pool objects 

class ConnectionPool:
    """
    Common base for every connection pool, e.g.
    :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.

    .. note::
        ConnectionPool.urlopen() does not normalize or percent-encode target URIs
        which is useful if your target server doesn't support percent-encoded
        target URIs.
    """

    # Subclasses override this with "http"/"https"; used for host normalization.
    scheme: str | None = None
    # Queue implementation holding idle connections (LIFO keeps sockets warm).
    QueueCls = queue.LifoQueue

    def __init__(self, host: str, port: int | None = None) -> None:
        if not host:
            raise LocationValueError("No host specified.")

        self.host = _normalize_host(host, scheme=self.scheme)
        self.port = port

        # This property uses 'normalize_host()' (not '_normalize_host()')
        # to avoid removing square braces around IPv6 addresses.
        # This value is sent to `HTTPConnection.set_tunnel()` if called
        # because square braces are required for HTTP CONNECT tunneling.
        self._tunnel_host = normalize_host(host, scheme=self.scheme).lower()

    def __str__(self) -> str:
        return "{}(host={!r}, port={!r})".format(
            type(self).__name__, self.host, self.port
        )

    def __enter__(self: _SelfT) -> _SelfT:
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> Literal[False]:
        # Release pooled resources, then let any in-flight exception propagate.
        self.close()
        return False

    def close(self) -> None:
        """
        Close all pooled connections and disable the pool.
        """

115 

116 

# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
# errno values that mean "operation would block" on a non-blocking socket;
# _raise_timeout() translates these into ReadTimeoutError.
_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK}

119 

120 

class HTTPConnectionPool(ConnectionPool, RequestMethods):
    """
    Thread-safe connection pool for one host.

    :param host:
        Host used for this HTTP Connection (e.g. "localhost"), passed into
        :class:`http.client.HTTPConnection`.

    :param port:
        Port used for this HTTP Connection (None is equivalent to 80), passed
        into :class:`http.client.HTTPConnection`.

    :param timeout:
        Socket timeout in seconds for each individual connection. This can
        be a float or integer, which sets the timeout for the HTTP request,
        or an instance of :class:`urllib3.util.Timeout` which gives you more
        fine-grained control over request timeouts. After the constructor has
        been parsed, this is always a `urllib3.util.Timeout` object.

    :param maxsize:
        Number of connections to save that can be reused. More than 1 is useful
        in multithreaded situations. If ``block`` is set to False, more
        connections will be created but they will not be saved once they've
        been used.

    :param block:
        If set to True, no more than ``maxsize`` connections will be used at
        a time. When no free connections are available, the call will block
        until a connection has been released. This is a useful side effect for
        particular multithreaded situations where one does not want to use more
        than maxsize connections per host to prevent flooding.

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.

    :param retries:
        Retry configuration to use by default with requests in this pool.

    :param _proxy:
        Parsed proxy URL, should not be used directly, instead, see
        :class:`urllib3.ProxyManager`

    :param _proxy_headers:
        A dictionary with proxy headers, should not be used directly,
        instead, see :class:`urllib3.ProxyManager`

    :param \\**conn_kw:
        Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
        :class:`urllib3.connection.HTTPSConnection` instances.
    """

    scheme = "http"
    ConnectionCls: (
        type[BaseHTTPConnection] | type[BaseHTTPSConnection]
    ) = HTTPConnection

    def __init__(
        self,
        host: str,
        port: int | None = None,
        timeout: _TYPE_TIMEOUT | None = _DEFAULT_TIMEOUT,
        maxsize: int = 1,
        block: bool = False,
        headers: typing.Mapping[str, str] | None = None,
        retries: Retry | bool | int | None = None,
        _proxy: Url | None = None,
        _proxy_headers: typing.Mapping[str, str] | None = None,
        _proxy_config: ProxyConfig | None = None,
        **conn_kw: typing.Any,
    ):
        ConnectionPool.__init__(self, host, port)
        RequestMethods.__init__(self, headers)

        # Normalize a bare number-of-seconds into a rich Timeout object once,
        # so the rest of the pool can assume `self.timeout` is a Timeout.
        if not isinstance(timeout, Timeout):
            timeout = Timeout.from_float(timeout)

        if retries is None:
            retries = Retry.DEFAULT

        self.timeout = timeout
        self.retries = retries

        self.pool: queue.LifoQueue[typing.Any] | None = self.QueueCls(maxsize)
        self.block = block

        self.proxy = _proxy
        self.proxy_headers = _proxy_headers or {}
        self.proxy_config = _proxy_config

        # Fill the queue up so that doing get() on it will block properly
        for _ in range(maxsize):
            self.pool.put(None)

        # These are mostly for testing and debugging purposes.
        self.num_connections = 0
        self.num_requests = 0
        self.conn_kw = conn_kw

        if self.proxy:
            # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
            # We cannot know if the user has added default socket options, so we cannot replace the
            # list.
            self.conn_kw.setdefault("socket_options", [])

            self.conn_kw["proxy"] = self.proxy
            self.conn_kw["proxy_config"] = self.proxy_config

        # Do not pass 'self' as callback to 'finalize'.
        # Then the 'finalize' would keep an endless living (leak) to self.
        # By just passing a reference to the pool allows the garbage collector
        # to free self if nobody else has a reference to it.
        pool = self.pool

        # Close all the HTTPConnections in the pool before the
        # HTTPConnectionPool object is garbage collected.
        weakref.finalize(self, _close_pool_connections, pool)

238 

239 def _new_conn(self) -> BaseHTTPConnection: 

240 """ 

241 Return a fresh :class:`HTTPConnection`. 

242 """ 

243 self.num_connections += 1 

244 log.debug( 

245 "Starting new HTTP connection (%d): %s:%s", 

246 self.num_connections, 

247 self.host, 

248 self.port or "80", 

249 ) 

250 

251 conn = self.ConnectionCls( 

252 host=self.host, 

253 port=self.port, 

254 timeout=self.timeout.connect_timeout, 

255 **self.conn_kw, 

256 ) 

257 return conn 

258 

259 def _get_conn(self, timeout: float | None = None) -> BaseHTTPConnection: 

260 """ 

261 Get a connection. Will return a pooled connection if one is available. 

262 

263 If no connections are available and :prop:`.block` is ``False``, then a 

264 fresh connection is returned. 

265 

266 :param timeout: 

267 Seconds to wait before giving up and raising 

268 :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and 

269 :prop:`.block` is ``True``. 

270 """ 

271 conn = None 

272 

273 if self.pool is None: 

274 raise ClosedPoolError(self, "Pool is closed.") 

275 

276 try: 

277 conn = self.pool.get(block=self.block, timeout=timeout) 

278 

279 except AttributeError: # self.pool is None 

280 raise ClosedPoolError(self, "Pool is closed.") from None # Defensive: 

281 

282 except queue.Empty: 

283 if self.block: 

284 raise EmptyPoolError( 

285 self, 

286 "Pool is empty and a new connection can't be opened due to blocking mode.", 

287 ) from None 

288 pass # Oh well, we'll create a new connection then 

289 

290 # If this is a persistent connection, check if it got disconnected 

291 if conn and is_connection_dropped(conn): 

292 log.debug("Resetting dropped connection: %s", self.host) 

293 conn.close() 

294 

295 return conn or self._new_conn() 

296 

    def _put_conn(self, conn: BaseHTTPConnection | None) -> None:
        """
        Put a connection back into the pool.

        :param conn:
            Connection object for the current host and port as returned by
            :meth:`._new_conn` or :meth:`._get_conn`.

            If the pool is already full, the connection is closed and discarded
            because we exceeded maxsize. If connections are discarded frequently,
            then maxsize should be increased.

            If the pool is closed, then the connection will be closed and discarded.
        """
        if self.pool is not None:
            try:
                self.pool.put(conn, block=False)
                return  # Everything is dandy, done.
            except AttributeError:
                # self.pool is None (closed concurrently); fall through to the
                # final close below.
                pass
            except queue.Full:
                # Connection never got put back into the pool, close it.
                if conn:
                    conn.close()

                if self.block:
                    # This should never happen if you got the conn from self._get_conn
                    raise FullPoolError(
                        self,
                        "Pool reached maximum size and no more connections are allowed.",
                    ) from None

                log.warning(
                    "Connection pool is full, discarding connection: %s. Connection pool size: %s",
                    self.host,
                    self.pool.qsize(),
                )

        # Connection never got put back into the pool, close it.
        # NOTE: also reached after the queue.Full branch above falls through,
        # so close() may be called twice on the same conn — presumably safe to
        # repeat; confirm against the connection implementation.
        if conn:
            conn.close()

339 

    def _validate_conn(self, conn: BaseHTTPConnection) -> None:
        """
        Called right before a request is made, after the socket is created.

        Base implementation is a deliberate no-op hook; subclasses may
        override it to run extra pre-request checks on the connection.
        """

    def _prepare_proxy(self, conn: BaseHTTPConnection) -> None:
        # Nothing to do for HTTP connections. urlopen() calls this hook when a
        # CONNECT tunnel is required; tunneling subclasses override it.
        pass

348 

349 def _get_timeout(self, timeout: _TYPE_TIMEOUT) -> Timeout: 

350 """Helper that always returns a :class:`urllib3.util.Timeout`""" 

351 if timeout is _DEFAULT_TIMEOUT: 

352 return self.timeout.clone() 

353 

354 if isinstance(timeout, Timeout): 

355 return timeout.clone() 

356 else: 

357 # User passed us an int/float. This is for backwards compatibility, 

358 # can be removed later 

359 return Timeout.from_float(timeout) 

360 

361 def _raise_timeout( 

362 self, 

363 err: BaseSSLError | OSError | SocketTimeout, 

364 url: str, 

365 timeout_value: _TYPE_TIMEOUT | None, 

366 ) -> None: 

367 """Is the error actually a timeout? Will raise a ReadTimeout or pass""" 

368 

369 if isinstance(err, SocketTimeout): 

370 raise ReadTimeoutError( 

371 self, url, f"Read timed out. (read timeout={timeout_value})" 

372 ) from err 

373 

374 # See the above comment about EAGAIN in Python 3. 

375 if hasattr(err, "errno") and err.errno in _blocking_errnos: 

376 raise ReadTimeoutError( 

377 self, url, f"Read timed out. (read timeout={timeout_value})" 

378 ) from err 

379 

    def _make_request(
        self,
        conn: BaseHTTPConnection,
        method: str,
        url: str,
        body: _TYPE_BODY | None = None,
        headers: typing.Mapping[str, str] | None = None,
        retries: Retry | None = None,
        timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
        chunked: bool = False,
        response_conn: BaseHTTPConnection | None = None,
        preload_content: bool = True,
        decode_content: bool = True,
        enforce_content_length: bool = True,
    ) -> BaseHTTPResponse:
        """
        Perform a request on a given urllib connection object taken from our
        pool.

        :param conn:
            a connection from one of our connection pools

        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)

        :param url:
            The URL to perform the request on.

        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.

        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.

        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.

            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.

            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.

        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.

        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.

        :param chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.

        :param response_conn:
            Set this to ``None`` if you will handle releasing the connection or
            set the connection to have the response release it.

        :param preload_content:
            If True, the response's body will be preloaded during construction.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param enforce_content_length:
            Enforce content length checking. Body returned by server must match
            value of Content-Length header, if present. Otherwise, raise error.
        """
        self.num_requests += 1

        # Resolve the per-request timeout and start the connect clock so the
        # remaining read budget can be computed after connecting.
        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = Timeout.resolve_default_timeout(timeout_obj.connect_timeout)

        try:
            # Trigger any extra validation we need to do.
            try:
                self._validate_conn(conn)
            except (SocketTimeout, BaseSSLError) as e:
                self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
                raise

        # _validate_conn() starts the connection to an HTTPS proxy
        # so we need to wrap errors with 'ProxyError' here too.
        except (
            OSError,
            NewConnectionError,
            TimeoutError,
            BaseSSLError,
            CertificateError,
            SSLError,
        ) as e:
            new_e: Exception = e
            if isinstance(e, (BaseSSLError, CertificateError)):
                new_e = SSLError(e)
            # If the connection didn't successfully connect to its proxy then
            # there is no established tunnel and the error is the proxy's
            # fault: wrap it as a ProxyError for the caller.
            if isinstance(
                new_e, (OSError, NewConnectionError, TimeoutError, SSLError)
            ) and (conn and conn.proxy and not conn.has_connected_to_proxy):
                new_e = _wrap_proxy_error(new_e, conn.proxy.scheme)
            raise new_e

        # conn.request() calls http.client.*.request, not the method in
        # urllib3.request. It also calls makefile (recv) on the socket.
        try:
            conn.request(
                method,
                url,
                body=body,
                headers=headers,
                chunked=chunked,
                preload_content=preload_content,
                decode_content=decode_content,
                enforce_content_length=enforce_content_length,
            )

        # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
        # legitimately able to close the connection after sending a valid response.
        # With this behaviour, the received response is still readable.
        except BrokenPipeError:
            pass
        except OSError as e:
            # MacOS/Linux
            # EPROTOTYPE is needed on macOS
            # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
            if e.errno != errno.EPROTOTYPE:
                raise

        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout

        if not conn.is_closed:
            # In Python 3 socket.py will catch EAGAIN and return None when you
            # try and read into the file pointer created by http.client, which
            # instead raises a BadStatusLine exception. Instead of catching
            # the exception and assuming all BadStatusLine exceptions are read
            # timeouts, check for a zero timeout before making the request.
            if read_timeout == 0:
                raise ReadTimeoutError(
                    self, url, f"Read timed out. (read timeout={read_timeout})"
                )
            conn.timeout = read_timeout

        # Receive the response from the server
        try:
            response = conn.getresponse()
        except (BaseSSLError, OSError) as e:
            self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
            raise

        # Set properties that are used by the pooling layer.
        response.retries = retries
        response._connection = response_conn  # type: ignore[attr-defined]
        response._pool = self  # type: ignore[attr-defined]

        log.debug(
            '%s://%s:%s "%s %s %s" %s %s',
            self.scheme,
            self.host,
            self.port,
            method,
            url,
            # HTTP version
            conn._http_vsn_str,  # type: ignore[attr-defined]
            response.status,
            response.length_remaining,  # type: ignore[attr-defined]
        )

        return response

560 

561 def close(self) -> None: 

562 """ 

563 Close all pooled connections and disable the pool. 

564 """ 

565 if self.pool is None: 

566 return 

567 # Disable access to the pool 

568 old_pool, self.pool = self.pool, None 

569 

570 # Close all the HTTPConnections in the pool. 

571 _close_pool_connections(old_pool) 

572 

573 def is_same_host(self, url: str) -> bool: 

574 """ 

575 Check if the given ``url`` is a member of the same host as this 

576 connection pool. 

577 """ 

578 if url.startswith("/"): 

579 return True 

580 

581 # TODO: Add optional support for socket.gethostbyname checking. 

582 scheme, _, host, port, *_ = parse_url(url) 

583 scheme = scheme or "http" 

584 if host is not None: 

585 host = _normalize_host(host, scheme=scheme) 

586 

587 # Use explicit default port for comparison when none is given 

588 if self.port and not port: 

589 port = port_by_scheme.get(scheme) 

590 elif not self.port and port == port_by_scheme.get(scheme): 

591 port = None 

592 

593 return (scheme, host, port) == (self.scheme, self.host, self.port) 

594 

    def urlopen(  # type: ignore[override]
        self,
        method: str,
        url: str,
        body: _TYPE_BODY | None = None,
        headers: typing.Mapping[str, str] | None = None,
        retries: Retry | bool | int | None = None,
        redirect: bool = True,
        assert_same_host: bool = True,
        timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
        pool_timeout: int | None = None,
        release_conn: bool | None = None,
        chunked: bool = False,
        body_pos: _TYPE_BODY_POSITION | None = None,
        preload_content: bool = True,
        decode_content: bool = True,
        **response_kw: typing.Any,
    ) -> BaseHTTPResponse:
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.

        .. note::

            More commonly, it's appropriate to use a convenience method
            such as :meth:`request`.

        .. note::

            `release_conn` will only behave as expected if
            `preload_content=False` because we want to make
            `preload_content=False` the default behaviour someday soon without
            breaking backwards compatibility.

        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)

        :param url:
            The URL to perform the request on.

        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.

        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.

        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.

            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.

            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.

        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.

        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.

        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When ``False``, you can
            use the pool on an HTTP proxy and request foreign hosts.

        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.

        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.

        :param bool preload_content:
            If True, the response's body will be preloaded into memory.

        :param bool decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of ``preload_content``
            which defaults to ``True``.

        :param bool chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.

        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.
        """
        parsed_url = parse_url(url)
        destination_scheme = parsed_url.scheme

        if headers is None:
            headers = self.headers

        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)

        if release_conn is None:
            release_conn = preload_content

        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)

        # Ensure that the URL we're connecting to is properly encoded
        if url.startswith("/"):
            url = to_str(_encode_target(url))
        else:
            url = to_str(parsed_url.url)

        conn = None

        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/urllib3/urllib3/issues/651>
        release_this_conn = release_conn

        http_tunnel_required = connection_requires_http_tunnel(
            self.proxy, self.proxy_config, destination_scheme
        )

        # Merge the proxy headers. Only done when not using HTTP CONNECT. We
        # have to copy the headers dict so we can safely change it without those
        # changes being reflected in anyone else's copy.
        if not http_tunnel_required:
            headers = headers.copy()  # type: ignore[attr-defined]
            headers.update(self.proxy_headers)  # type: ignore[union-attr]

        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None

        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False

        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)

        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)

            conn.timeout = timeout_obj.connect_timeout  # type: ignore[assignment]

            # Is this a closed/new connection that requires CONNECT tunnelling?
            if self.proxy is not None and http_tunnel_required and conn.is_closed:
                try:
                    self._prepare_proxy(conn)
                except (BaseSSLError, OSError, SocketTimeout) as e:
                    self._raise_timeout(
                        err=e, url=self.proxy.url, timeout_value=conn.timeout
                    )
                    raise

            # If we're going to release the connection in ``finally:``, then
            # the response doesn't need to know about the connection. Otherwise
            # it will also try to release it and we'll have a double-release
            # mess.
            response_conn = conn if not release_conn else None

            # Make the request on the HTTPConnection object
            response = self._make_request(
                conn,
                method,
                url,
                timeout=timeout_obj,
                body=body,
                headers=headers,
                chunked=chunked,
                retries=retries,
                response_conn=response_conn,
                preload_content=preload_content,
                decode_content=decode_content,
                **response_kw,
            )

            # Everything went great!
            clean_exit = True

        except EmptyPoolError:
            # Didn't get a connection from the pool, no need to clean up
            clean_exit = True
            release_this_conn = False
            raise

        except (
            TimeoutError,
            HTTPException,
            OSError,
            ProtocolError,
            BaseSSLError,
            SSLError,
            CertificateError,
            ProxyError,
        ) as e:
            # Discard the connection for these exceptions. It will be
            # replaced during the next _get_conn() call.
            clean_exit = False
            new_e: Exception = e
            if isinstance(e, (BaseSSLError, CertificateError)):
                new_e = SSLError(e)
            # Failures before the proxy tunnel was established are the proxy's
            # fault and are re-wrapped as ProxyError below.
            if isinstance(
                new_e,
                (
                    OSError,
                    NewConnectionError,
                    TimeoutError,
                    SSLError,
                    HTTPException,
                ),
            ) and (conn and conn.proxy and not conn.has_connected_to_proxy):
                new_e = _wrap_proxy_error(new_e, conn.proxy.scheme)
            elif isinstance(new_e, (OSError, HTTPException)):
                new_e = ProtocolError("Connection aborted.", new_e)

            # increment() raises MaxRetryError when the retry budget is spent;
            # otherwise we fall through, remember the error, and recurse below.
            retries = retries.increment(
                method, url, error=new_e, _pool=self, _stacktrace=sys.exc_info()[2]
            )
            retries.sleep()

            # Keep track of the error for the retry warning.
            err = e

        finally:
            if not clean_exit:
                # We hit some kind of exception, handled or otherwise. We need
                # to throw the connection away unless explicitly told not to.
                # Close the connection, set the variable to None, and make sure
                # we put the None back in the pool to avoid leaking it.
                if conn:
                    conn.close()
                    conn = None
                release_this_conn = True

            if release_this_conn:
                # Put the connection back to be reused. If the connection is
                # expired then it will be None, which will get replaced with a
                # fresh connection during _get_conn.
                self._put_conn(conn)

        if not conn:
            # Try again
            log.warning(
                "Retrying (%r) after connection broken by '%r': %s", retries, err, url
            )
            return self.urlopen(
                method,
                url,
                body,
                headers,
                retries,
                redirect,
                assert_same_host,
                timeout=timeout,
                pool_timeout=pool_timeout,
                release_conn=release_conn,
                chunked=chunked,
                body_pos=body_pos,
                preload_content=preload_content,
                decode_content=decode_content,
                **response_kw,
            )

        # Handle redirect?
        redirect_location = redirect and response.get_redirect_location()
        if redirect_location:
            if response.status == 303:
                # Change the method according to RFC 9110, Section 15.4.4.
                method = "GET"

            try:
                retries = retries.increment(method, url, response=response, _pool=self)
            except MaxRetryError:
                if retries.raise_on_redirect:
                    response.drain_conn()
                    raise
                return response

            response.drain_conn()
            retries.sleep_for_retry(response)
            log.debug("Redirecting %s -> %s", url, redirect_location)
            return self.urlopen(
                method,
                redirect_location,
                body,
                headers,
                retries=retries,
                redirect=redirect,
                assert_same_host=assert_same_host,
                timeout=timeout,
                pool_timeout=pool_timeout,
                release_conn=release_conn,
                chunked=chunked,
                body_pos=body_pos,
                preload_content=preload_content,
                decode_content=decode_content,
                **response_kw,
            )

        # Check if we should retry the HTTP response.
        has_retry_after = bool(response.headers.get("Retry-After"))
        if retries.is_retry(method, response.status, has_retry_after):
            try:
                retries = retries.increment(method, url, response=response, _pool=self)
            except MaxRetryError:
                if retries.raise_on_status:
                    response.drain_conn()
                    raise
                return response

            response.drain_conn()
            retries.sleep(response)
            log.debug("Retry: %s", url)
            return self.urlopen(
                method,
                url,
                body,
                headers,
                retries=retries,
                redirect=redirect,
                assert_same_host=assert_same_host,
                timeout=timeout,
                pool_timeout=pool_timeout,
                release_conn=release_conn,
                chunked=chunked,
                body_pos=body_pos,
                preload_content=preload_content,
                decode_content=decode_content,
                **response_kw,
            )

        return response

960 

961 

class HTTPSConnectionPool(HTTPConnectionPool):
    """
    Same as :class:`.HTTPConnectionPool`, but HTTPS.

    :class:`.HTTPSConnection` uses one of ``assert_fingerprint``,
    ``assert_hostname`` and ``host`` in this order to verify connections.
    If ``assert_hostname`` is False, no verification is done.

    The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
    ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl`
    is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
    the connection socket into an SSL socket.
    """

    scheme = "https"
    ConnectionCls: type[BaseHTTPSConnection] = HTTPSConnection

    def __init__(
        self,
        host: str,
        port: int | None = None,
        timeout: _TYPE_TIMEOUT | None = _DEFAULT_TIMEOUT,
        maxsize: int = 1,
        block: bool = False,
        headers: typing.Mapping[str, str] | None = None,
        retries: Retry | bool | int | None = None,
        _proxy: Url | None = None,
        _proxy_headers: typing.Mapping[str, str] | None = None,
        key_file: str | None = None,
        cert_file: str | None = None,
        cert_reqs: int | str | None = None,
        key_password: str | None = None,
        ca_certs: str | None = None,
        ssl_version: int | str | None = None,
        ssl_minimum_version: ssl.TLSVersion | None = None,
        ssl_maximum_version: ssl.TLSVersion | None = None,
        assert_hostname: str | Literal[False] | None = None,
        assert_fingerprint: str | None = None,
        ca_cert_dir: str | None = None,
        **conn_kw: typing.Any,
    ) -> None:
        # Pool-level behavior (sizing, retries, proxying) is handled entirely
        # by the parent class; this subclass only adds TLS configuration.
        super().__init__(
            host,
            port,
            timeout,
            maxsize,
            block,
            headers,
            retries,
            _proxy,
            _proxy_headers,
            **conn_kw,
        )

        # TLS settings are stored verbatim and handed to every connection
        # object created by _new_conn().
        self.cert_reqs = cert_reqs
        self.cert_file = cert_file
        self.key_file = key_file
        self.key_password = key_password
        self.ca_certs = ca_certs
        self.ca_cert_dir = ca_cert_dir
        self.ssl_version = ssl_version
        self.ssl_minimum_version = ssl_minimum_version
        self.ssl_maximum_version = ssl_maximum_version
        self.assert_hostname = assert_hostname
        self.assert_fingerprint = assert_fingerprint

    def _prepare_proxy(self, conn: HTTPSConnection) -> None:  # type: ignore[override]
        """Establishes a tunnel connection through HTTP CONNECT."""
        # The CONNECT request itself is sent over TLS only when the proxy
        # endpoint speaks HTTPS.
        tunnel_scheme = (
            "https" if self.proxy and self.proxy.scheme == "https" else "http"
        )

        conn.set_tunnel(
            scheme=tunnel_scheme,
            host=self._tunnel_host,
            port=self.port,
            headers=self.proxy_headers,
        )
        conn.connect()

    def _new_conn(self) -> BaseHTTPSConnection:
        """
        Return a fresh :class:`urllib3.connection.HTTPConnection`.
        """
        self.num_connections += 1
        log.debug(
            "Starting new HTTPS connection (%d): %s:%s",
            self.num_connections,
            self.host,
            self.port or "443",
        )

        # ConnectionCls is DummyConnection when the ssl module could not be
        # imported at package load time.
        if not self.ConnectionCls or self.ConnectionCls is DummyConnection:  # type: ignore[comparison-overlap]
            raise ImportError(
                "Can't connect to HTTPS URL because the SSL module is not available."
            )

        # When tunnelling through a proxy, the TCP connection targets the
        # proxy endpoint rather than the final host.
        if self.proxy is not None and self.proxy.host is not None:
            conn_host: str = self.proxy.host
            conn_port = self.proxy.port
        else:
            conn_host = self.host
            conn_port = self.port

        return self.ConnectionCls(
            host=conn_host,
            port=conn_port,
            timeout=self.timeout.connect_timeout,
            cert_file=self.cert_file,
            key_file=self.key_file,
            key_password=self.key_password,
            cert_reqs=self.cert_reqs,
            ca_certs=self.ca_certs,
            ca_cert_dir=self.ca_cert_dir,
            assert_hostname=self.assert_hostname,
            assert_fingerprint=self.assert_fingerprint,
            ssl_version=self.ssl_version,
            ssl_minimum_version=self.ssl_minimum_version,
            ssl_maximum_version=self.ssl_maximum_version,
            **self.conn_kw,
        )

    def _validate_conn(self, conn: BaseHTTPConnection) -> None:
        """
        Called right before a request is made, after the socket is created.
        """
        super()._validate_conn(conn)

        # Force connect early to allow us to validate the connection.
        if conn.is_closed:
            conn.connect()

        # Warn (once per warning filter settings) when certificate
        # verification is disabled for this connection.
        if conn.is_verified:
            return
        warnings.warn(
            (
                f"Unverified HTTPS request is being made to host '{conn.host}'. "
                "Adding certificate verification is strongly advised. See: "
                "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
                "#tls-warnings"
            ),
            InsecureRequestWarning,
        )

1105 

def connection_from_url(url: str, **kw: typing.Any) -> HTTPConnectionPool:
    """
    Given a url, return an :class:`.ConnectionPool` instance of its host.

    This is a shortcut for not having to parse out the scheme, host, and port
    of the url before creating an :class:`.ConnectionPool` instance.

    :param url:
        Absolute URL string that must include the scheme. Port is optional.

    :param \\**kw:
        Passes additional parameters to the constructor of the appropriate
        :class:`.ConnectionPool`. Useful for specifying things like
        timeout, maxsize, headers, etc.

    Example::

        >>> conn = connection_from_url('http://google.com/')
        >>> r = conn.request('GET', '/')
    """
    parsed = parse_url(url)
    # Fall back to plain HTTP and the scheme's well-known port when the URL
    # leaves them out.
    scheme = parsed.scheme or "http"
    port = parsed.port or port_by_scheme.get(scheme, 80)

    pool_cls: type[HTTPConnectionPool] = (
        HTTPSConnectionPool if scheme == "https" else HTTPConnectionPool
    )
    return pool_cls(parsed.host, port=port, **kw)  # type: ignore[arg-type]

1133 

1134 

@typing.overload
def _normalize_host(host: None, scheme: str | None) -> None:
    ...


@typing.overload
def _normalize_host(host: str, scheme: str | None) -> str:
    ...


def _normalize_host(host: str | None, scheme: str | None) -> str | None:
    """
    Normalize hosts for comparisons and use with sockets.
    """

    normalized = normalize_host(host, scheme)

    # httplib doesn't like it when we include brackets in IPv6 addresses
    # Specifically, if we include brackets but also pass the port then
    # httplib crazily doubles up the square brackets on the Host header.
    # Instead, we need to make sure we never pass ``None`` as the port.
    # However, for backward compatibility reasons we can't actually
    # *assert* that. See http://bugs.python.org/issue28539
    if normalized and normalized[:1] == "[" and normalized[-1:] == "]":
        normalized = normalized[1:-1]
    return normalized

1161 

1162 

def _url_from_pool(
    pool: HTTPConnectionPool | HTTPSConnectionPool, path: str | None = None
) -> str:
    """Returns the URL from a given connection pool. This is mainly used for testing and logging."""
    # Rebuild a Url from the pool's connection parameters plus the caller's
    # optional path, then render it as a string.
    url = Url(scheme=pool.scheme, host=pool.host, port=pool.port, path=path)
    return url.url

1168 

1169 

1170def _close_pool_connections(pool: queue.LifoQueue[typing.Any]) -> None: 

1171 """Drains a queue of connections and closes each one.""" 

1172 try: 

1173 while True: 

1174 conn = pool.get(block=False) 

1175 if conn: 

1176 conn.close() 

1177 except queue.Empty: 

1178 pass # Done.