Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.11/site-packages/urllib3/connectionpool.py: 20%

Shortcuts on this page

r m x   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

344 statements  

1from __future__ import annotations 

2 

3import errno 

4import logging 

5import queue 

6import sys 

7import typing 

8import warnings 

9import weakref 

10from socket import timeout as SocketTimeout 

11from types import TracebackType 

12 

13from ._base_connection import _TYPE_BODY 

14from ._collections import HTTPHeaderDict 

15from ._request_methods import RequestMethods 

16from .connection import ( 

17 BaseSSLError, 

18 BrokenPipeError, 

19 DummyConnection, 

20 HTTPConnection, 

21 HTTPException, 

22 HTTPSConnection, 

23 ProxyConfig, 

24 _wrap_proxy_error, 

25) 

26from .connection import port_by_scheme as port_by_scheme 

27from .exceptions import ( 

28 ClosedPoolError, 

29 EmptyPoolError, 

30 FullPoolError, 

31 HostChangedError, 

32 InsecureRequestWarning, 

33 LocationValueError, 

34 MaxRetryError, 

35 NewConnectionError, 

36 ProtocolError, 

37 ProxyError, 

38 ReadTimeoutError, 

39 SSLError, 

40 TimeoutError, 

41) 

42from .response import BaseHTTPResponse 

43from .util.connection import is_connection_dropped 

44from .util.proxy import connection_requires_http_tunnel 

45from .util.request import _TYPE_BODY_POSITION, set_file_position 

46from .util.retry import Retry 

47from .util.ssl_match_hostname import CertificateError 

48from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_DEFAULT, Timeout 

49from .util.url import Url, _encode_target 

50from .util.url import _normalize_host as normalize_host 

51from .util.url import parse_url 

52from .util.util import to_str 

53 

54if typing.TYPE_CHECKING: 

55 import ssl 

56 

57 from typing_extensions import Self 

58 

59 from ._base_connection import BaseHTTPConnection, BaseHTTPSConnection 

60 

# Module-level logger; handlers/levels are configured by the application.
log = logging.getLogger(__name__)

# Every form a timeout may take at this API boundary: a rich Timeout object,
# raw seconds, the _DEFAULT_TIMEOUT sentinel (use the pool default), or None.
_TYPE_TIMEOUT = typing.Union[Timeout, float, _TYPE_DEFAULT, None]

64 

65 

# Pool objects
class ConnectionPool:
    """
    Abstract base for connection pools such as :class:`.HTTPConnectionPool`
    and :class:`.HTTPSConnectionPool`.

    .. note::
        ConnectionPool.urlopen() does not normalize or percent-encode target URIs
        which is useful if your target server doesn't support percent-encoded
        target URIs.
    """

    scheme: str | None = None
    QueueCls = queue.LifoQueue

    def __init__(self, host: str, port: int | None = None) -> None:
        if not host:
            raise LocationValueError("No host specified.")

        self.host = _normalize_host(host, scheme=self.scheme)
        self.port = port

        # Kept separately via 'normalize_host()' (not '_normalize_host()') so
        # that IPv6 addresses retain their square braces. This value is handed
        # to `HTTPConnection.set_tunnel()`, and HTTP CONNECT tunneling requires
        # the bracketed form.
        self._tunnel_host = normalize_host(host, scheme=self.scheme).lower()

    def __str__(self) -> str:
        cls_name = type(self).__name__
        return f"{cls_name}(host={self.host!r}, port={self.port!r})"

    def __enter__(self) -> Self:
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> typing.Literal[False]:
        self.close()
        # Returning False always re-raises any exception from the 'with' body.
        return False

    def close(self) -> None:
        """
        Close all pooled connections and disable the pool.
        """

114 

115 

# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
# errno values that indicate "would block" — treated as read timeouts by
# _raise_timeout() below.
_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK}

118 

119 

class HTTPConnectionPool(ConnectionPool, RequestMethods):
    """
    Thread-safe connection pool for one host.

    :param host:
        Host used for this HTTP Connection (e.g. "localhost"), passed into
        :class:`http.client.HTTPConnection`.

    :param port:
        Port used for this HTTP Connection (None is equivalent to 80), passed
        into :class:`http.client.HTTPConnection`.

    :param timeout:
        Socket timeout in seconds for each individual connection. This can
        be a float or integer, which sets the timeout for the HTTP request,
        or an instance of :class:`urllib3.util.Timeout` which gives you more
        fine-grained control over request timeouts. After the constructor has
        been parsed, this is always a `urllib3.util.Timeout` object.

    :param maxsize:
        Number of connections to save that can be reused. More than 1 is useful
        in multithreaded situations. If ``block`` is set to False, more
        connections will be created but they will not be saved once they've
        been used.

    :param block:
        If set to True, no more than ``maxsize`` connections will be used at
        a time. When no free connections are available, the call will block
        until a connection has been released. This is a useful side effect for
        particular multithreaded situations where one does not want to use more
        than maxsize connections per host to prevent flooding.

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.

    :param retries:
        Retry configuration to use by default with requests in this pool.

    :param _proxy:
        Parsed proxy URL, should not be used directly, instead, see
        :class:`urllib3.ProxyManager`

    :param _proxy_headers:
        A dictionary with proxy headers, should not be used directly,
        instead, see :class:`urllib3.ProxyManager`

    :param \\**conn_kw:
        Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
        :class:`urllib3.connection.HTTPSConnection` instances.
    """

    scheme = "http"
    ConnectionCls: type[BaseHTTPConnection] | type[BaseHTTPSConnection] = HTTPConnection

    def __init__(
        self,
        host: str,
        port: int | None = None,
        timeout: _TYPE_TIMEOUT | None = _DEFAULT_TIMEOUT,
        maxsize: int = 1,
        block: bool = False,
        headers: typing.Mapping[str, str] | None = None,
        retries: Retry | bool | int | None = None,
        _proxy: Url | None = None,
        _proxy_headers: typing.Mapping[str, str] | None = None,
        _proxy_config: ProxyConfig | None = None,
        **conn_kw: typing.Any,
    ):
        ConnectionPool.__init__(self, host, port)
        RequestMethods.__init__(self, headers)

        # Normalize raw seconds / sentinel values into a Timeout object so the
        # rest of the pool can rely on .connect_timeout / .read_timeout.
        if not isinstance(timeout, Timeout):
            timeout = Timeout.from_float(timeout)

        if retries is None:
            retries = Retry.DEFAULT

        self.timeout = timeout
        self.retries = retries

        # The pool becomes None once close() has been called.
        self.pool: queue.LifoQueue[typing.Any] | None = self.QueueCls(maxsize)
        self.block = block

        self.proxy = _proxy
        self.proxy_headers = _proxy_headers or {}
        self.proxy_config = _proxy_config

        # Fill the queue up so that doing get() on it will block properly
        for _ in range(maxsize):
            self.pool.put(None)

        # These are mostly for testing and debugging purposes.
        self.num_connections = 0
        self.num_requests = 0
        self.conn_kw = conn_kw

        if self.proxy:
            # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
            # We cannot know if the user has added default socket options, so we cannot replace the
            # list.
            self.conn_kw.setdefault("socket_options", [])

            self.conn_kw["proxy"] = self.proxy
            self.conn_kw["proxy_config"] = self.proxy_config

        # Do not pass 'self' as callback to 'finalize'.
        # Then the 'finalize' would keep an endless living (leak) to self.
        # By just passing a reference to the pool allows the garbage collector
        # to free self if nobody else has a reference to it.
        pool = self.pool

        # Close all the HTTPConnections in the pool before the
        # HTTPConnectionPool object is garbage collected.
        weakref.finalize(self, _close_pool_connections, pool)

235 

236 def _new_conn(self) -> BaseHTTPConnection: 

237 """ 

238 Return a fresh :class:`HTTPConnection`. 

239 """ 

240 self.num_connections += 1 

241 log.debug( 

242 "Starting new HTTP connection (%d): %s:%s", 

243 self.num_connections, 

244 self.host, 

245 self.port or "80", 

246 ) 

247 

248 conn = self.ConnectionCls( 

249 host=self.host, 

250 port=self.port, 

251 timeout=self.timeout.connect_timeout, 

252 **self.conn_kw, 

253 ) 

254 return conn 

255 

256 def _get_conn(self, timeout: float | None = None) -> BaseHTTPConnection: 

257 """ 

258 Get a connection. Will return a pooled connection if one is available. 

259 

260 If no connections are available and :prop:`.block` is ``False``, then a 

261 fresh connection is returned. 

262 

263 :param timeout: 

264 Seconds to wait before giving up and raising 

265 :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and 

266 :prop:`.block` is ``True``. 

267 """ 

268 conn = None 

269 

270 if self.pool is None: 

271 raise ClosedPoolError(self, "Pool is closed.") 

272 

273 try: 

274 conn = self.pool.get(block=self.block, timeout=timeout) 

275 

276 except AttributeError: # self.pool is None 

277 raise ClosedPoolError(self, "Pool is closed.") from None # Defensive: 

278 

279 except queue.Empty: 

280 if self.block: 

281 raise EmptyPoolError( 

282 self, 

283 "Pool is empty and a new connection can't be opened due to blocking mode.", 

284 ) from None 

285 pass # Oh well, we'll create a new connection then 

286 

287 # If this is a persistent connection, check if it got disconnected 

288 if conn and is_connection_dropped(conn): 

289 log.debug("Resetting dropped connection: %s", self.host) 

290 conn.close() 

291 

292 return conn or self._new_conn() 

293 

294 def _put_conn(self, conn: BaseHTTPConnection | None) -> None: 

295 """ 

296 Put a connection back into the pool. 

297 

298 :param conn: 

299 Connection object for the current host and port as returned by 

300 :meth:`._new_conn` or :meth:`._get_conn`. 

301 

302 If the pool is already full, the connection is closed and discarded 

303 because we exceeded maxsize. If connections are discarded frequently, 

304 then maxsize should be increased. 

305 

306 If the pool is closed, then the connection will be closed and discarded. 

307 """ 

308 if self.pool is not None: 

309 try: 

310 self.pool.put(conn, block=False) 

311 return # Everything is dandy, done. 

312 except AttributeError: 

313 # self.pool is None. 

314 pass 

315 except queue.Full: 

316 # Connection never got put back into the pool, close it. 

317 if conn: 

318 conn.close() 

319 

320 if self.block: 

321 # This should never happen if you got the conn from self._get_conn 

322 raise FullPoolError( 

323 self, 

324 "Pool reached maximum size and no more connections are allowed.", 

325 ) from None 

326 

327 log.warning( 

328 "Connection pool is full, discarding connection: %s. Connection pool size: %s", 

329 self.host, 

330 self.pool.qsize(), 

331 ) 

332 

333 # Connection never got put back into the pool, close it. 

334 if conn: 

335 conn.close() 

336 

    def _validate_conn(self, conn: BaseHTTPConnection) -> None:
        """
        Called right before a request is made, after the socket is created.

        No-op hook here; subclasses may override to perform extra checks
        (e.g. certificate validation for HTTPS).
        """

341 

    def _prepare_proxy(self, conn: BaseHTTPConnection) -> None:
        # Nothing to do for HTTP connections.
        # (Subclasses that tunnel through a proxy override this hook.)
        pass

345 

346 def _get_timeout(self, timeout: _TYPE_TIMEOUT) -> Timeout: 

347 """Helper that always returns a :class:`urllib3.util.Timeout`""" 

348 if timeout is _DEFAULT_TIMEOUT: 

349 return self.timeout.clone() 

350 

351 if isinstance(timeout, Timeout): 

352 return timeout.clone() 

353 else: 

354 # User passed us an int/float. This is for backwards compatibility, 

355 # can be removed later 

356 return Timeout.from_float(timeout) 

357 

358 def _raise_timeout( 

359 self, 

360 err: BaseSSLError | OSError | SocketTimeout, 

361 url: str, 

362 timeout_value: _TYPE_TIMEOUT | None, 

363 ) -> None: 

364 """Is the error actually a timeout? Will raise a ReadTimeout or pass""" 

365 

366 if isinstance(err, SocketTimeout): 

367 raise ReadTimeoutError( 

368 self, url, f"Read timed out. (read timeout={timeout_value})" 

369 ) from err 

370 

371 # See the above comment about EAGAIN in Python 3. 

372 if hasattr(err, "errno") and err.errno in _blocking_errnos: 

373 raise ReadTimeoutError( 

374 self, url, f"Read timed out. (read timeout={timeout_value})" 

375 ) from err 

376 

    def _make_request(
        self,
        conn: BaseHTTPConnection,
        method: str,
        url: str,
        body: _TYPE_BODY | None = None,
        headers: typing.Mapping[str, str] | None = None,
        retries: Retry | None = None,
        timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
        chunked: bool = False,
        response_conn: BaseHTTPConnection | None = None,
        preload_content: bool = True,
        decode_content: bool = True,
        enforce_content_length: bool = True,
    ) -> BaseHTTPResponse:
        """
        Perform a request on a given urllib connection object taken from our
        pool.

        :param conn:
            a connection from one of our connection pools

        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)

        :param url:
            The URL to perform the request on.

        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.

        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.

        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.

            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.

            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.

        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.

        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.

        :param chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.

        :param response_conn:
            Set this to ``None`` if you will handle releasing the connection or
            set the connection to have the response release it.

        :param preload_content:
            If True, the response's body will be preloaded during construction.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param enforce_content_length:
            Enforce content length checking. Body returned by server must match
            value of Content-Length header, if present. Otherwise, raise error.
        """
        self.num_requests += 1

        # Start the connect-phase clock; the read timeout is applied separately
        # further down, right before getresponse().
        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = Timeout.resolve_default_timeout(timeout_obj.connect_timeout)

        try:
            # Trigger any extra validation we need to do.
            try:
                self._validate_conn(conn)
            except (SocketTimeout, BaseSSLError) as e:
                # Raise ReadTimeoutError if it's a socket timeout, else re-raise.
                self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
                raise

        # _validate_conn() starts the connection to an HTTPS proxy
        # so we need to wrap errors with 'ProxyError' here too.
        except (
            OSError,
            NewConnectionError,
            TimeoutError,
            BaseSSLError,
            CertificateError,
            SSLError,
        ) as e:
            new_e: Exception = e
            if isinstance(e, (BaseSSLError, CertificateError)):
                new_e = SSLError(e)
            # If the connection didn't successfully connect to its proxy,
            # then the error should be wrapped as a ProxyError.
            if isinstance(
                new_e, (OSError, NewConnectionError, TimeoutError, SSLError)
            ) and (conn and conn.proxy and not conn.has_connected_to_proxy):
                new_e = _wrap_proxy_error(new_e, conn.proxy.scheme)
            raise new_e

        # conn.request() calls http.client.*.request, not the method in
        # urllib3.request. It also calls makefile (recv) on the socket.
        try:
            conn.request(
                method,
                url,
                body=body,
                headers=headers,
                chunked=chunked,
                preload_content=preload_content,
                decode_content=decode_content,
                enforce_content_length=enforce_content_length,
            )

        # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
        # legitimately able to close the connection after sending a valid response.
        # With this behaviour, the received response is still readable.
        except BrokenPipeError:
            pass
        except OSError as e:
            # MacOS/Linux
            # EPROTOTYPE and ECONNRESET are needed on macOS
            # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
            # Condition changed later to emit ECONNRESET instead of only EPROTOTYPE.
            if e.errno != errno.EPROTOTYPE and e.errno != errno.ECONNRESET:
                raise

        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout

        if not conn.is_closed:
            # In Python 3 socket.py will catch EAGAIN and return None when you
            # try and read into the file pointer created by http.client, which
            # instead raises a BadStatusLine exception. Instead of catching
            # the exception and assuming all BadStatusLine exceptions are read
            # timeouts, check for a zero timeout before making the request.
            if read_timeout == 0:
                raise ReadTimeoutError(
                    self, url, f"Read timed out. (read timeout={read_timeout})"
                )
            conn.timeout = read_timeout

        # Receive the response from the server
        try:
            response = conn.getresponse()
        except (BaseSSLError, OSError) as e:
            self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
            raise

        # Set properties that are used by the pooling layer.
        response.retries = retries
        response._connection = response_conn  # type: ignore[attr-defined]
        response._pool = self  # type: ignore[attr-defined]

        log.debug(
            '%s://%s:%s "%s %s %s" %s %s',
            self.scheme,
            self.host,
            self.port,
            method,
            url,
            response.version_string,
            response.status,
            response.length_remaining,
        )

        return response

557 

558 def close(self) -> None: 

559 """ 

560 Close all pooled connections and disable the pool. 

561 """ 

562 if self.pool is None: 

563 return 

564 # Disable access to the pool 

565 old_pool, self.pool = self.pool, None 

566 

567 # Close all the HTTPConnections in the pool. 

568 _close_pool_connections(old_pool) 

569 

570 def is_same_host(self, url: str) -> bool: 

571 """ 

572 Check if the given ``url`` is a member of the same host as this 

573 connection pool. 

574 """ 

575 if url.startswith("/"): 

576 return True 

577 

578 # TODO: Add optional support for socket.gethostbyname checking. 

579 scheme, _, host, port, *_ = parse_url(url) 

580 scheme = scheme or "http" 

581 if host is not None: 

582 host = _normalize_host(host, scheme=scheme) 

583 

584 # Use explicit default port for comparison when none is given 

585 if self.port and not port: 

586 port = port_by_scheme.get(scheme) 

587 elif not self.port and port == port_by_scheme.get(scheme): 

588 port = None 

589 

590 return (scheme, host, port) == (self.scheme, self.host, self.port) 

591 

    def urlopen(  # type: ignore[override]
        self,
        method: str,
        url: str,
        body: _TYPE_BODY | None = None,
        headers: typing.Mapping[str, str] | None = None,
        retries: Retry | bool | int | None = None,
        redirect: bool = True,
        assert_same_host: bool = True,
        timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
        pool_timeout: int | None = None,
        release_conn: bool | None = None,
        chunked: bool = False,
        body_pos: _TYPE_BODY_POSITION | None = None,
        preload_content: bool = True,
        decode_content: bool = True,
        **response_kw: typing.Any,
    ) -> BaseHTTPResponse:
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.

        .. note::

           More commonly, it's appropriate to use a convenience method
           such as :meth:`request`.

        .. note::

           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.

        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)

        :param url:
            The URL to perform the request on.

        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.

        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.

        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.

            If ``None`` (default) will retry 3 times, see ``Retry.DEFAULT``. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.

            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.

        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.

        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.

        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When ``False``, you can
            use the pool on an HTTP proxy and request foreign hosts.

        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.

        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.

        :param bool preload_content:
            If True, the response's body will be preloaded into memory.

        :param bool decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of ``preload_content``
            which defaults to ``True``.

        :param bool chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.

        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.
        """
        parsed_url = parse_url(url)
        destination_scheme = parsed_url.scheme

        if headers is None:
            headers = self.headers

        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)

        if release_conn is None:
            release_conn = preload_content

        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)

        # Ensure that the URL we're connecting to is properly encoded
        if url.startswith("/"):
            url = to_str(_encode_target(url))
        else:
            url = to_str(parsed_url.url)

        conn = None

        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/urllib3/urllib3/issues/651>
        release_this_conn = release_conn

        http_tunnel_required = connection_requires_http_tunnel(
            self.proxy, self.proxy_config, destination_scheme
        )

        # Merge the proxy headers. Only done when not using HTTP CONNECT. We
        # have to copy the headers dict so we can safely change it without those
        # changes being reflected in anyone else's copy.
        if not http_tunnel_required:
            headers = headers.copy()  # type: ignore[attr-defined]
            headers.update(self.proxy_headers)  # type: ignore[union-attr]

        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None

        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False

        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)

        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)

            conn.timeout = timeout_obj.connect_timeout  # type: ignore[assignment]

            # Is this a closed/new connection that requires CONNECT tunnelling?
            if self.proxy is not None and http_tunnel_required and conn.is_closed:
                try:
                    self._prepare_proxy(conn)
                except (BaseSSLError, OSError, SocketTimeout) as e:
                    self._raise_timeout(
                        err=e, url=self.proxy.url, timeout_value=conn.timeout
                    )
                    raise

            # If we're going to release the connection in ``finally:``, then
            # the response doesn't need to know about the connection. Otherwise
            # it will also try to release it and we'll have a double-release
            # mess.
            response_conn = conn if not release_conn else None

            # Make the request on the HTTPConnection object
            response = self._make_request(
                conn,
                method,
                url,
                timeout=timeout_obj,
                body=body,
                headers=headers,
                chunked=chunked,
                retries=retries,
                response_conn=response_conn,
                preload_content=preload_content,
                decode_content=decode_content,
                **response_kw,
            )

            # Everything went great!
            clean_exit = True

        except EmptyPoolError:
            # Didn't get a connection from the pool, no need to clean up
            clean_exit = True
            release_this_conn = False
            raise

        except (
            TimeoutError,
            HTTPException,
            OSError,
            ProtocolError,
            BaseSSLError,
            SSLError,
            CertificateError,
            ProxyError,
        ) as e:
            # Discard the connection for these exceptions. It will be
            # replaced during the next _get_conn() call.
            clean_exit = False
            new_e: Exception = e
            if isinstance(e, (BaseSSLError, CertificateError)):
                new_e = SSLError(e)
            if isinstance(
                new_e,
                (
                    OSError,
                    NewConnectionError,
                    TimeoutError,
                    SSLError,
                    HTTPException,
                ),
            ) and (conn and conn.proxy and not conn.has_connected_to_proxy):
                # Failed before reaching the proxy: surface as a ProxyError.
                new_e = _wrap_proxy_error(new_e, conn.proxy.scheme)
            elif isinstance(new_e, (OSError, HTTPException)):
                new_e = ProtocolError("Connection aborted.", new_e)

            # increment() raises MaxRetryError when the retry budget runs out.
            retries = retries.increment(
                method, url, error=new_e, _pool=self, _stacktrace=sys.exc_info()[2]
            )
            retries.sleep()

            # Keep track of the error for the retry warning.
            err = e

        finally:
            if not clean_exit:
                # We hit some kind of exception, handled or otherwise. We need
                # to throw the connection away unless explicitly told not to.
                # Close the connection, set the variable to None, and make sure
                # we put the None back in the pool to avoid leaking it.
                if conn:
                    conn.close()
                    conn = None
                release_this_conn = True

            if release_this_conn:
                # Put the connection back to be reused. If the connection is
                # expired then it will be None, which will get replaced with a
                # fresh connection during _get_conn.
                self._put_conn(conn)

        if not conn:
            # Try again
            log.warning(
                "Retrying (%r) after connection broken by '%r': %s", retries, err, url
            )
            return self.urlopen(
                method,
                url,
                body,
                headers,
                retries,
                redirect,
                assert_same_host,
                timeout=timeout,
                pool_timeout=pool_timeout,
                release_conn=release_conn,
                chunked=chunked,
                body_pos=body_pos,
                preload_content=preload_content,
                decode_content=decode_content,
                **response_kw,
            )

        # Handle redirect?
        redirect_location = redirect and response.get_redirect_location()
        if redirect_location:
            if response.status == 303:
                # Change the method according to RFC 9110, Section 15.4.4.
                method = "GET"
                # And lose the body not to transfer anything sensitive.
                body = None
                headers = HTTPHeaderDict(headers)._prepare_for_method_change()

            try:
                retries = retries.increment(method, url, response=response, _pool=self)
            except MaxRetryError:
                if retries.raise_on_redirect:
                    response.drain_conn()
                    raise
                return response

            response.drain_conn()
            retries.sleep_for_retry(response)
            log.debug("Redirecting %s -> %s", url, redirect_location)
            return self.urlopen(
                method,
                redirect_location,
                body,
                headers,
                retries=retries,
                redirect=redirect,
                assert_same_host=assert_same_host,
                timeout=timeout,
                pool_timeout=pool_timeout,
                release_conn=release_conn,
                chunked=chunked,
                body_pos=body_pos,
                preload_content=preload_content,
                decode_content=decode_content,
                **response_kw,
            )

        # Check if we should retry the HTTP response.
        has_retry_after = bool(response.headers.get("Retry-After"))
        if retries.is_retry(method, response.status, has_retry_after):
            try:
                retries = retries.increment(method, url, response=response, _pool=self)
            except MaxRetryError:
                if retries.raise_on_status:
                    response.drain_conn()
                    raise
                return response

            response.drain_conn()
            retries.sleep(response)
            log.debug("Retry: %s", url)
            return self.urlopen(
                method,
                url,
                body,
                headers,
                retries=retries,
                redirect=redirect,
                assert_same_host=assert_same_host,
                timeout=timeout,
                pool_timeout=pool_timeout,
                release_conn=release_conn,
                chunked=chunked,
                body_pos=body_pos,
                preload_content=preload_content,
                decode_content=decode_content,
                **response_kw,
            )

        return response

961 

962 

class HTTPSConnectionPool(HTTPConnectionPool):
    """
    Same as :class:`.HTTPConnectionPool`, but HTTPS.

    :class:`.HTTPSConnection` uses one of ``assert_fingerprint``,
    ``assert_hostname`` and ``host`` in this order to verify connections.
    If ``assert_hostname`` is False, no verification is done.

    The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
    ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl`
    is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
    the connection socket into an SSL socket.
    """

    scheme = "https"
    ConnectionCls: type[BaseHTTPSConnection] = HTTPSConnection

    def __init__(
        self,
        host: str,
        port: int | None = None,
        timeout: _TYPE_TIMEOUT | None = _DEFAULT_TIMEOUT,
        maxsize: int = 1,
        block: bool = False,
        headers: typing.Mapping[str, str] | None = None,
        retries: Retry | bool | int | None = None,
        _proxy: Url | None = None,
        _proxy_headers: typing.Mapping[str, str] | None = None,
        key_file: str | None = None,
        cert_file: str | None = None,
        cert_reqs: int | str | None = None,
        key_password: str | None = None,
        ca_certs: str | None = None,
        ssl_version: int | str | None = None,
        ssl_minimum_version: ssl.TLSVersion | None = None,
        ssl_maximum_version: ssl.TLSVersion | None = None,
        assert_hostname: str | typing.Literal[False] | None = None,
        assert_fingerprint: str | None = None,
        ca_cert_dir: str | None = None,
        **conn_kw: typing.Any,
    ) -> None:
        super().__init__(
            host,
            port,
            timeout,
            maxsize,
            block,
            headers,
            retries,
            _proxy,
            _proxy_headers,
            **conn_kw,
        )

        # TLS-specific settings; these are consumed by _new_conn() each time
        # a fresh HTTPSConnection is built for this pool.
        self.key_file = key_file
        self.key_password = key_password
        self.cert_file = cert_file
        self.cert_reqs = cert_reqs
        self.ca_certs = ca_certs
        self.ca_cert_dir = ca_cert_dir
        self.ssl_version = ssl_version
        self.ssl_minimum_version = ssl_minimum_version
        self.ssl_maximum_version = ssl_maximum_version
        self.assert_hostname = assert_hostname
        self.assert_fingerprint = assert_fingerprint

    def _prepare_proxy(self, conn: HTTPSConnection) -> None:  # type: ignore[override]
        """Establishes a tunnel connection through HTTP CONNECT."""
        # The tunnel itself is encrypted only when the proxy URL is https.
        tunnel_scheme = (
            "https" if self.proxy and self.proxy.scheme == "https" else "http"
        )

        conn.set_tunnel(
            scheme=tunnel_scheme,
            host=self._tunnel_host,
            port=self.port,
            headers=self.proxy_headers,
        )
        conn.connect()

    def _new_conn(self) -> BaseHTTPSConnection:
        """
        Return a fresh :class:`urllib3.connection.HTTPConnection`.
        """
        self.num_connections += 1
        log.debug(
            "Starting new HTTPS connection (%d): %s:%s",
            self.num_connections,
            self.host,
            self.port or "443",
        )

        # ConnectionCls degrades to DummyConnection when the ssl module is
        # missing; refuse to proceed in that case.
        if not self.ConnectionCls or self.ConnectionCls is DummyConnection:  # type: ignore[comparison-overlap]
            raise ImportError(
                "Can't connect to HTTPS URL because the SSL module is not available."
            )

        # When proxying, the TCP connection is made to the proxy endpoint,
        # not to the origin host.
        if self.proxy is not None and self.proxy.host is not None:
            target_host: str = self.proxy.host
            target_port = self.proxy.port
        else:
            target_host = self.host
            target_port = self.port

        return self.ConnectionCls(
            host=target_host,
            port=target_port,
            timeout=self.timeout.connect_timeout,
            cert_file=self.cert_file,
            key_file=self.key_file,
            key_password=self.key_password,
            cert_reqs=self.cert_reqs,
            ca_certs=self.ca_certs,
            ca_cert_dir=self.ca_cert_dir,
            assert_hostname=self.assert_hostname,
            assert_fingerprint=self.assert_fingerprint,
            ssl_version=self.ssl_version,
            ssl_minimum_version=self.ssl_minimum_version,
            ssl_maximum_version=self.ssl_maximum_version,
            **self.conn_kw,
        )

    def _validate_conn(self, conn: BaseHTTPConnection) -> None:
        """
        Called right before a request is made, after the socket is created.
        """
        super()._validate_conn(conn)

        # Force connect early to allow us to validate the connection.
        if conn.is_closed:
            conn.connect()

        # TODO revise this, see https://github.com/urllib3/urllib3/issues/2791
        if not conn.is_verified and not conn.proxy_is_verified:
            warnings.warn(
                (
                    f"Unverified HTTPS request is being made to host '{conn.host}'. "
                    "Adding certificate verification is strongly advised. See: "
                    "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
                    "#tls-warnings"
                ),
                InsecureRequestWarning,
            )

1106 

1107 

def connection_from_url(url: str, **kw: typing.Any) -> HTTPConnectionPool:
    """
    Given a url, return an :class:`.ConnectionPool` instance of its host.

    This is a shortcut for not having to parse out the scheme, host, and port
    of the url before creating an :class:`.ConnectionPool` instance.

    :param url:
        Absolute URL string that must include the scheme. Port is optional.

    :param \\**kw:
        Passes additional parameters to the constructor of the appropriate
        :class:`.ConnectionPool`. Useful for specifying things like
        timeout, maxsize, headers, etc.

    Example::

        >>> conn = connection_from_url('http://google.com/')
        >>> r = conn.request('GET', '/')
    """
    parsed = parse_url(url)
    scheme = parsed.scheme or "http"
    # Fall back to the scheme's well-known port (80 if the scheme is unknown).
    port = parsed.port or port_by_scheme.get(scheme, 80)
    pool_cls = HTTPSConnectionPool if scheme == "https" else HTTPConnectionPool
    return pool_cls(parsed.host, port=port, **kw)  # type: ignore[arg-type]

1135 

1136 

@typing.overload
def _normalize_host(host: None, scheme: str | None) -> None: ...


@typing.overload
def _normalize_host(host: str, scheme: str | None) -> str: ...


def _normalize_host(host: str | None, scheme: str | None) -> str | None:
    """
    Normalize hosts for comparisons and use with sockets.
    """

    normalized = normalize_host(host, scheme)

    # httplib doesn't like it when we include brackets in IPv6 addresses
    # Specifically, if we include brackets but also pass the port then
    # httplib crazily doubles up the square brackets on the Host header.
    # Instead, we need to make sure we never pass ``None`` as the port.
    # However, for backward compatibility reasons we can't actually
    # *assert* that. See http://bugs.python.org/issue28539
    if normalized and normalized[:1] == "[" and normalized[-1:] == "]":
        normalized = normalized[1:-1]
    return normalized

1161 

1162 

def _url_from_pool(
    pool: HTTPConnectionPool | HTTPSConnectionPool, path: str | None = None
) -> str:
    """Returns the URL from a given connection pool. This is mainly used for testing and logging."""
    url = Url(scheme=pool.scheme, host=pool.host, port=pool.port, path=path)
    return url.url

1168 

1169 

1170def _close_pool_connections(pool: queue.LifoQueue[typing.Any]) -> None: 

1171 """Drains a queue of connections and closes each one.""" 

1172 try: 

1173 while True: 

1174 conn = pool.get(block=False) 

1175 if conn: 

1176 conn.close() 

1177 except queue.Empty: 

1178 pass # Done.