Coverage for /pythoncovmergedfiles/medio/medio/usr/local/lib/python3.8/site-packages/urllib3/connectionpool.py: 20%

Shortcuts on this page

r m x   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

345 statements  

1from __future__ import annotations 

2 

3import errno 

4import logging 

5import queue 

6import sys 

7import typing 

8import warnings 

9import weakref 

10from socket import timeout as SocketTimeout 

11from types import TracebackType 

12 

13from ._base_connection import _TYPE_BODY 

14from ._collections import HTTPHeaderDict 

15from ._request_methods import RequestMethods 

16from .connection import ( 

17 BaseSSLError, 

18 BrokenPipeError, 

19 DummyConnection, 

20 HTTPConnection, 

21 HTTPException, 

22 HTTPSConnection, 

23 ProxyConfig, 

24 _wrap_proxy_error, 

25) 

26from .connection import port_by_scheme as port_by_scheme 

27from .exceptions import ( 

28 ClosedPoolError, 

29 EmptyPoolError, 

30 FullPoolError, 

31 HostChangedError, 

32 InsecureRequestWarning, 

33 LocationValueError, 

34 MaxRetryError, 

35 NewConnectionError, 

36 ProtocolError, 

37 ProxyError, 

38 ReadTimeoutError, 

39 SSLError, 

40 TimeoutError, 

41) 

42from .response import BaseHTTPResponse 

43from .util.connection import is_connection_dropped 

44from .util.proxy import connection_requires_http_tunnel 

45from .util.request import _TYPE_BODY_POSITION, set_file_position 

46from .util.retry import Retry 

47from .util.ssl_match_hostname import CertificateError 

48from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_DEFAULT, Timeout 

49from .util.url import Url, _encode_target 

50from .util.url import _normalize_host as normalize_host 

51from .util.url import parse_url 

52from .util.util import to_str 

53 

54if typing.TYPE_CHECKING: 

55 import ssl 

56 

57 from typing_extensions import Self 

58 

59 from ._base_connection import BaseHTTPConnection, BaseHTTPSConnection 

60 

# Module-level logger used for pool events (new connections, retries, redirects).
log = logging.getLogger(__name__)

# All shapes a caller may pass as a timeout: a Timeout object, a raw number of
# seconds, the _DEFAULT_TIMEOUT sentinel ("use the pool default"), or None.
_TYPE_TIMEOUT = typing.Union[Timeout, float, _TYPE_DEFAULT, None]

64 

65 

66# Pool objects 

# Pool objects
class ConnectionPool:
    """
    Base class for all connection pools, such as
    :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.

    .. note::
        ConnectionPool.urlopen() does not normalize or percent-encode target URIs
        which is useful if your target server doesn't support percent-encoded
        target URIs.
    """

    # Subclasses override with "http"/"https"; drives host normalization.
    scheme: str | None = None
    # LIFO keeps the most recently used (still warm) connection on top.
    QueueCls = queue.LifoQueue

    def __init__(self, host: str, port: int | None = None) -> None:
        if not host:
            raise LocationValueError("No host specified.")

        self.port = port
        self.host = _normalize_host(host, scheme=self.scheme)

        # This attribute uses 'normalize_host()' (not '_normalize_host()') so
        # square braces around IPv6 addresses are preserved. The value is sent
        # to `HTTPConnection.set_tunnel()` if called, because the braces are
        # required for HTTP CONNECT tunneling.
        self._tunnel_host = normalize_host(host, scheme=self.scheme).lower()

    def __str__(self) -> str:
        return "{}(host={!r}, port={!r})".format(
            type(self).__name__, self.host, self.port
        )

    def __enter__(self) -> Self:
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> typing.Literal[False]:
        self.close()
        # Returning False re-raises any exception from the ``with`` body.
        return False

    def close(self) -> None:
        """
        Close all pooled connections and disable the pool.
        """

114 

115 

# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
# errno values meaning "operation would block" on a non-blocking socket; these
# are treated as read timeouts rather than hard errors (see _raise_timeout()).
_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK}

118 

119 

class HTTPConnectionPool(ConnectionPool, RequestMethods):
    """
    Thread-safe connection pool for one host.

    :param host:
        Host used for this HTTP Connection (e.g. "localhost"), passed into
        :class:`http.client.HTTPConnection`.

    :param port:
        Port used for this HTTP Connection (None is equivalent to 80), passed
        into :class:`http.client.HTTPConnection`.

    :param timeout:
        Socket timeout in seconds for each individual connection. This can
        be a float or integer, which sets the timeout for the HTTP request,
        or an instance of :class:`urllib3.util.Timeout` which gives you more
        fine-grained control over request timeouts. After the constructor has
        been parsed, this is always a `urllib3.util.Timeout` object.

    :param maxsize:
        Number of connections to save that can be reused. More than 1 is useful
        in multithreaded situations. If ``block`` is set to False, more
        connections will be created but they will not be saved once they've
        been used.

    :param block:
        If set to True, no more than ``maxsize`` connections will be used at
        a time. When no free connections are available, the call will block
        until a connection has been released. This is a useful side effect for
        particular multithreaded situations where one does not want to use more
        than maxsize connections per host to prevent flooding.

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.

    :param retries:
        Retry configuration to use by default with requests in this pool.

    :param _proxy:
        Parsed proxy URL, should not be used directly, instead, see
        :class:`urllib3.ProxyManager`

    :param _proxy_headers:
        A dictionary with proxy headers, should not be used directly,
        instead, see :class:`urllib3.ProxyManager`

    :param \\**conn_kw:
        Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
        :class:`urllib3.connection.HTTPSConnection` instances.
    """

    # Scheme this pool serves; compared against parsed URLs in is_same_host().
    scheme = "http"
    # Class instantiated by _new_conn(); subclasses substitute an HTTPS-capable
    # connection class here.
    ConnectionCls: (
        type[BaseHTTPConnection] | type[BaseHTTPSConnection]
    ) = HTTPConnection

    def __init__(
        self,
        host: str,
        port: int | None = None,
        timeout: _TYPE_TIMEOUT | None = _DEFAULT_TIMEOUT,
        maxsize: int = 1,
        block: bool = False,
        headers: typing.Mapping[str, str] | None = None,
        retries: Retry | bool | int | None = None,
        _proxy: Url | None = None,
        _proxy_headers: typing.Mapping[str, str] | None = None,
        _proxy_config: ProxyConfig | None = None,
        **conn_kw: typing.Any,
    ):
        # Parameters are documented on the class docstring.
        ConnectionPool.__init__(self, host, port)
        RequestMethods.__init__(self, headers)

        # Normalize raw ints/floats/None into a Timeout object up front so the
        # rest of the pool can rely on its interface.
        if not isinstance(timeout, Timeout):
            timeout = Timeout.from_float(timeout)

        if retries is None:
            retries = Retry.DEFAULT

        self.timeout = timeout
        self.retries = retries

        self.pool: queue.LifoQueue[typing.Any] | None = self.QueueCls(maxsize)
        self.block = block

        self.proxy = _proxy
        self.proxy_headers = _proxy_headers or {}
        self.proxy_config = _proxy_config

        # Fill the queue up so that doing get() on it will block properly
        for _ in range(maxsize):
            self.pool.put(None)

        # These are mostly for testing and debugging purposes.
        self.num_connections = 0
        self.num_requests = 0
        self.conn_kw = conn_kw

        if self.proxy:
            # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
            # We cannot know if the user has added default socket options, so we cannot replace the
            # list.
            self.conn_kw.setdefault("socket_options", [])

            self.conn_kw["proxy"] = self.proxy
            self.conn_kw["proxy_config"] = self.proxy_config

        # Do not pass 'self' as callback to 'finalize'.
        # Then the 'finalize' would keep an endless living (leak) to self.
        # By just passing a reference to the pool allows the garbage collector
        # to free self if nobody else has a reference to it.
        pool = self.pool

        # Close all the HTTPConnections in the pool before the
        # HTTPConnectionPool object is garbage collected.
        weakref.finalize(self, _close_pool_connections, pool)

238 def _new_conn(self) -> BaseHTTPConnection: 

239 """ 

240 Return a fresh :class:`HTTPConnection`. 

241 """ 

242 self.num_connections += 1 

243 log.debug( 

244 "Starting new HTTP connection (%d): %s:%s", 

245 self.num_connections, 

246 self.host, 

247 self.port or "80", 

248 ) 

249 

250 conn = self.ConnectionCls( 

251 host=self.host, 

252 port=self.port, 

253 timeout=self.timeout.connect_timeout, 

254 **self.conn_kw, 

255 ) 

256 return conn 

257 

258 def _get_conn(self, timeout: float | None = None) -> BaseHTTPConnection: 

259 """ 

260 Get a connection. Will return a pooled connection if one is available. 

261 

262 If no connections are available and :prop:`.block` is ``False``, then a 

263 fresh connection is returned. 

264 

265 :param timeout: 

266 Seconds to wait before giving up and raising 

267 :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and 

268 :prop:`.block` is ``True``. 

269 """ 

270 conn = None 

271 

272 if self.pool is None: 

273 raise ClosedPoolError(self, "Pool is closed.") 

274 

275 try: 

276 conn = self.pool.get(block=self.block, timeout=timeout) 

277 

278 except AttributeError: # self.pool is None 

279 raise ClosedPoolError(self, "Pool is closed.") from None # Defensive: 

280 

281 except queue.Empty: 

282 if self.block: 

283 raise EmptyPoolError( 

284 self, 

285 "Pool is empty and a new connection can't be opened due to blocking mode.", 

286 ) from None 

287 pass # Oh well, we'll create a new connection then 

288 

289 # If this is a persistent connection, check if it got disconnected 

290 if conn and is_connection_dropped(conn): 

291 log.debug("Resetting dropped connection: %s", self.host) 

292 conn.close() 

293 

294 return conn or self._new_conn() 

295 

    def _put_conn(self, conn: BaseHTTPConnection | None) -> None:
        """
        Put a connection back into the pool.

        :param conn:
            Connection object for the current host and port as returned by
            :meth:`._new_conn` or :meth:`._get_conn`.

            If the pool is already full, the connection is closed and discarded
            because we exceeded maxsize. If connections are discarded frequently,
            then maxsize should be increased.

            If the pool is closed, then the connection will be closed and discarded.
        """
        if self.pool is not None:
            try:
                self.pool.put(conn, block=False)
                return  # Everything is dandy, done.
            except AttributeError:
                # self.pool is None.
                # (Pool was closed concurrently after the check above; fall
                # through so the connection still gets closed below.)
                pass
            except queue.Full:
                # Connection never got put back into the pool, close it.
                # Closed before raising so the socket isn't leaked even when
                # FullPoolError propagates.
                if conn:
                    conn.close()

                if self.block:
                    # This should never happen if you got the conn from self._get_conn
                    raise FullPoolError(
                        self,
                        "Pool reached maximum size and no more connections are allowed.",
                    ) from None

                log.warning(
                    "Connection pool is full, discarding connection: %s. Connection pool size: %s",
                    self.host,
                    self.pool.qsize(),
                )

        # Connection never got put back into the pool, close it.
        if conn:
            conn.close()

338 

    def _validate_conn(self, conn: BaseHTTPConnection) -> None:
        """
        Called right before a request is made, after the socket is created.
        """
        # Intentionally a no-op here; subclasses may override to perform extra
        # per-connection checks (presumably TLS-related for HTTPS pools —
        # the override is not visible in this file chunk).

343 

    def _prepare_proxy(self, conn: BaseHTTPConnection) -> None:
        # Hook invoked from urlopen() when a CONNECT tunnel is required.
        # Nothing to do for HTTP connections.
        pass

347 

348 def _get_timeout(self, timeout: _TYPE_TIMEOUT) -> Timeout: 

349 """Helper that always returns a :class:`urllib3.util.Timeout`""" 

350 if timeout is _DEFAULT_TIMEOUT: 

351 return self.timeout.clone() 

352 

353 if isinstance(timeout, Timeout): 

354 return timeout.clone() 

355 else: 

356 # User passed us an int/float. This is for backwards compatibility, 

357 # can be removed later 

358 return Timeout.from_float(timeout) 

359 

360 def _raise_timeout( 

361 self, 

362 err: BaseSSLError | OSError | SocketTimeout, 

363 url: str, 

364 timeout_value: _TYPE_TIMEOUT | None, 

365 ) -> None: 

366 """Is the error actually a timeout? Will raise a ReadTimeout or pass""" 

367 

368 if isinstance(err, SocketTimeout): 

369 raise ReadTimeoutError( 

370 self, url, f"Read timed out. (read timeout={timeout_value})" 

371 ) from err 

372 

373 # See the above comment about EAGAIN in Python 3. 

374 if hasattr(err, "errno") and err.errno in _blocking_errnos: 

375 raise ReadTimeoutError( 

376 self, url, f"Read timed out. (read timeout={timeout_value})" 

377 ) from err 

378 

    def _make_request(
        self,
        conn: BaseHTTPConnection,
        method: str,
        url: str,
        body: _TYPE_BODY | None = None,
        headers: typing.Mapping[str, str] | None = None,
        retries: Retry | None = None,
        timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
        chunked: bool = False,
        response_conn: BaseHTTPConnection | None = None,
        preload_content: bool = True,
        decode_content: bool = True,
        enforce_content_length: bool = True,
    ) -> BaseHTTPResponse:
        """
        Perform a request on a given urllib connection object taken from our
        pool.

        :param conn:
            a connection from one of our connection pools

        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)

        :param url:
            The URL to perform the request on.

        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.

        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.

        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.

            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.

            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.

        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.

        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.

        :param chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.

        :param response_conn:
            Set this to ``None`` if you will handle releasing the connection or
            set the connection to have the response release it.

        :param preload_content:
            If True, the response's body will be preloaded during construction.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param enforce_content_length:
            Enforce content length checking. Body returned by server must match
            value of Content-Length header, if present. Otherwise, raise error.
        """
        self.num_requests += 1

        # Stage 1: connect. Start the connect-timeout clock and apply the
        # connect timeout to the connection; the read timeout is applied later.
        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = Timeout.resolve_default_timeout(timeout_obj.connect_timeout)

        try:
            # Trigger any extra validation we need to do.
            try:
                self._validate_conn(conn)
            except (SocketTimeout, BaseSSLError) as e:
                self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
                raise

        # _validate_conn() starts the connection to an HTTPS proxy
        # so we need to wrap errors with 'ProxyError' here too.
        except (
            OSError,
            NewConnectionError,
            TimeoutError,
            BaseSSLError,
            CertificateError,
            SSLError,
        ) as e:
            new_e: Exception = e
            if isinstance(e, (BaseSSLError, CertificateError)):
                new_e = SSLError(e)
            # If the connection didn't successfully connect to it's proxy
            # then there
            if isinstance(
                new_e, (OSError, NewConnectionError, TimeoutError, SSLError)
            ) and (conn and conn.proxy and not conn.has_connected_to_proxy):
                new_e = _wrap_proxy_error(new_e, conn.proxy.scheme)
            raise new_e

        # conn.request() calls http.client.*.request, not the method in
        # urllib3.request. It also calls makefile (recv) on the socket.
        try:
            conn.request(
                method,
                url,
                body=body,
                headers=headers,
                chunked=chunked,
                preload_content=preload_content,
                decode_content=decode_content,
                enforce_content_length=enforce_content_length,
            )

        # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
        # legitimately able to close the connection after sending a valid response.
        # With this behaviour, the received response is still readable.
        except BrokenPipeError:
            pass
        except OSError as e:
            # MacOS/Linux
            # EPROTOTYPE and ECONNRESET are needed on macOS
            # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
            # Condition changed later to emit ECONNRESET instead of only EPROTOTYPE.
            if e.errno != errno.EPROTOTYPE and e.errno != errno.ECONNRESET:
                raise

        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout

        if not conn.is_closed:
            # In Python 3 socket.py will catch EAGAIN and return None when you
            # try and read into the file pointer created by http.client, which
            # instead raises a BadStatusLine exception. Instead of catching
            # the exception and assuming all BadStatusLine exceptions are read
            # timeouts, check for a zero timeout before making the request.
            if read_timeout == 0:
                raise ReadTimeoutError(
                    self, url, f"Read timed out. (read timeout={read_timeout})"
                )
            conn.timeout = read_timeout

        # Receive the response from the server
        try:
            response = conn.getresponse()
        except (BaseSSLError, OSError) as e:
            self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
            raise

        # Set properties that are used by the pooling layer.
        response.retries = retries
        response._connection = response_conn  # type: ignore[attr-defined]
        response._pool = self  # type: ignore[attr-defined]

        log.debug(
            '%s://%s:%s "%s %s HTTP/%s" %s %s',
            self.scheme,
            self.host,
            self.port,
            method,
            url,
            response.version,
            response.status,
            response.length_remaining,
        )

        return response

559 

560 def close(self) -> None: 

561 """ 

562 Close all pooled connections and disable the pool. 

563 """ 

564 if self.pool is None: 

565 return 

566 # Disable access to the pool 

567 old_pool, self.pool = self.pool, None 

568 

569 # Close all the HTTPConnections in the pool. 

570 _close_pool_connections(old_pool) 

571 

572 def is_same_host(self, url: str) -> bool: 

573 """ 

574 Check if the given ``url`` is a member of the same host as this 

575 connection pool. 

576 """ 

577 if url.startswith("/"): 

578 return True 

579 

580 # TODO: Add optional support for socket.gethostbyname checking. 

581 scheme, _, host, port, *_ = parse_url(url) 

582 scheme = scheme or "http" 

583 if host is not None: 

584 host = _normalize_host(host, scheme=scheme) 

585 

586 # Use explicit default port for comparison when none is given 

587 if self.port and not port: 

588 port = port_by_scheme.get(scheme) 

589 elif not self.port and port == port_by_scheme.get(scheme): 

590 port = None 

591 

592 return (scheme, host, port) == (self.scheme, self.host, self.port) 

593 

    def urlopen(  # type: ignore[override]
        self,
        method: str,
        url: str,
        body: _TYPE_BODY | None = None,
        headers: typing.Mapping[str, str] | None = None,
        retries: Retry | bool | int | None = None,
        redirect: bool = True,
        assert_same_host: bool = True,
        timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
        pool_timeout: int | None = None,
        release_conn: bool | None = None,
        chunked: bool = False,
        body_pos: _TYPE_BODY_POSITION | None = None,
        preload_content: bool = True,
        decode_content: bool = True,
        **response_kw: typing.Any,
    ) -> BaseHTTPResponse:
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.

        .. note::

           More commonly, it's appropriate to use a convenience method
           such as :meth:`request`.

        .. note::

           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.

        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)

        :param url:
            The URL to perform the request on.

        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.

        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.

        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.

            If ``None`` (default) will retry 3 times, see ``Retry.DEFAULT``. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.

            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.

        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.

        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.

        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When ``False``, you can
            use the pool on an HTTP proxy and request foreign hosts.

        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.

        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.

        :param bool preload_content:
            If True, the response's body will be preloaded into memory.

        :param bool decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of ``preload_content``
            which defaults to ``True``.

        :param bool chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.

        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.
        """
        parsed_url = parse_url(url)
        destination_scheme = parsed_url.scheme

        if headers is None:
            headers = self.headers

        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)

        if release_conn is None:
            release_conn = preload_content

        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)

        # Ensure that the URL we're connecting to is properly encoded
        if url.startswith("/"):
            url = to_str(_encode_target(url))
        else:
            url = to_str(parsed_url.url)

        conn = None

        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/urllib3/urllib3/issues/651>
        release_this_conn = release_conn

        http_tunnel_required = connection_requires_http_tunnel(
            self.proxy, self.proxy_config, destination_scheme
        )

        # Merge the proxy headers. Only done when not using HTTP CONNECT. We
        # have to copy the headers dict so we can safely change it without those
        # changes being reflected in anyone else's copy.
        if not http_tunnel_required:
            headers = headers.copy()  # type: ignore[attr-defined]
            headers.update(self.proxy_headers)  # type: ignore[union-attr]

        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None

        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False

        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)

        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)

            conn.timeout = timeout_obj.connect_timeout  # type: ignore[assignment]

            # Is this a closed/new connection that requires CONNECT tunnelling?
            if self.proxy is not None and http_tunnel_required and conn.is_closed:
                try:
                    self._prepare_proxy(conn)
                except (BaseSSLError, OSError, SocketTimeout) as e:
                    self._raise_timeout(
                        err=e, url=self.proxy.url, timeout_value=conn.timeout
                    )
                    raise

            # If we're going to release the connection in ``finally:``, then
            # the response doesn't need to know about the connection. Otherwise
            # it will also try to release it and we'll have a double-release
            # mess.
            response_conn = conn if not release_conn else None

            # Make the request on the HTTPConnection object
            response = self._make_request(
                conn,
                method,
                url,
                timeout=timeout_obj,
                body=body,
                headers=headers,
                chunked=chunked,
                retries=retries,
                response_conn=response_conn,
                preload_content=preload_content,
                decode_content=decode_content,
                **response_kw,
            )

            # Everything went great!
            clean_exit = True

        except EmptyPoolError:
            # Didn't get a connection from the pool, no need to clean up
            clean_exit = True
            release_this_conn = False
            raise

        except (
            TimeoutError,
            HTTPException,
            OSError,
            ProtocolError,
            BaseSSLError,
            SSLError,
            CertificateError,
            ProxyError,
        ) as e:
            # Discard the connection for these exceptions. It will be
            # replaced during the next _get_conn() call.
            clean_exit = False
            new_e: Exception = e
            if isinstance(e, (BaseSSLError, CertificateError)):
                new_e = SSLError(e)
            if isinstance(
                new_e,
                (
                    OSError,
                    NewConnectionError,
                    TimeoutError,
                    SSLError,
                    HTTPException,
                ),
            ) and (conn and conn.proxy and not conn.has_connected_to_proxy):
                new_e = _wrap_proxy_error(new_e, conn.proxy.scheme)
            elif isinstance(new_e, (OSError, HTTPException)):
                new_e = ProtocolError("Connection aborted.", new_e)

            # Retry.increment() raises MaxRetryError once the budget is spent;
            # otherwise it returns a decremented Retry for the next attempt.
            retries = retries.increment(
                method, url, error=new_e, _pool=self, _stacktrace=sys.exc_info()[2]
            )
            retries.sleep()

            # Keep track of the error for the retry warning.
            err = e

        finally:
            if not clean_exit:
                # We hit some kind of exception, handled or otherwise. We need
                # to throw the connection away unless explicitly told not to.
                # Close the connection, set the variable to None, and make sure
                # we put the None back in the pool to avoid leaking it.
                if conn:
                    conn.close()
                    conn = None
                release_this_conn = True

            if release_this_conn:
                # Put the connection back to be reused. If the connection is
                # expired then it will be None, which will get replaced with a
                # fresh connection during _get_conn.
                self._put_conn(conn)

        if not conn:
            # Try again
            # (conn is None exactly when the except branch above ran, i.e. the
            # request failed but Retry.increment() allowed another attempt.)
            log.warning(
                "Retrying (%r) after connection broken by '%r': %s", retries, err, url
            )
            return self.urlopen(
                method,
                url,
                body,
                headers,
                retries,
                redirect,
                assert_same_host,
                timeout=timeout,
                pool_timeout=pool_timeout,
                release_conn=release_conn,
                chunked=chunked,
                body_pos=body_pos,
                preload_content=preload_content,
                decode_content=decode_content,
                **response_kw,
            )

        # Handle redirect?
        redirect_location = redirect and response.get_redirect_location()
        if redirect_location:
            if response.status == 303:
                # Change the method according to RFC 9110, Section 15.4.4.
                method = "GET"
                # And lose the body not to transfer anything sensitive.
                body = None
                headers = HTTPHeaderDict(headers)._prepare_for_method_change()

            try:
                retries = retries.increment(method, url, response=response, _pool=self)
            except MaxRetryError:
                if retries.raise_on_redirect:
                    response.drain_conn()
                    raise
                return response

            # Drain so the connection can be reused before recursing.
            response.drain_conn()
            retries.sleep_for_retry(response)
            log.debug("Redirecting %s -> %s", url, redirect_location)
            return self.urlopen(
                method,
                redirect_location,
                body,
                headers,
                retries=retries,
                redirect=redirect,
                assert_same_host=assert_same_host,
                timeout=timeout,
                pool_timeout=pool_timeout,
                release_conn=release_conn,
                chunked=chunked,
                body_pos=body_pos,
                preload_content=preload_content,
                decode_content=decode_content,
                **response_kw,
            )

        # Check if we should retry the HTTP response.
        has_retry_after = bool(response.headers.get("Retry-After"))
        if retries.is_retry(method, response.status, has_retry_after):
            try:
                retries = retries.increment(method, url, response=response, _pool=self)
            except MaxRetryError:
                if retries.raise_on_status:
                    response.drain_conn()
                    raise
                return response

            # Drain so the connection can be reused before recursing.
            response.drain_conn()
            retries.sleep(response)
            log.debug("Retry: %s", url)
            return self.urlopen(
                method,
                url,
                body,
                headers,
                retries=retries,
                redirect=redirect,
                assert_same_host=assert_same_host,
                timeout=timeout,
                pool_timeout=pool_timeout,
                release_conn=release_conn,
                chunked=chunked,
                body_pos=body_pos,
                preload_content=preload_content,
                decode_content=decode_content,
                **response_kw,
            )

        return response

963 

964 

class HTTPSConnectionPool(HTTPConnectionPool):
    """
    Same as :class:`.HTTPConnectionPool`, but HTTPS.

    :class:`.HTTPSConnection` uses one of ``assert_fingerprint``,
    ``assert_hostname`` and ``host`` in this order to verify connections.
    If ``assert_hostname`` is False, no verification is done.

    The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
    ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl`
    is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
    the connection socket into an SSL socket.
    """

    scheme = "https"
    ConnectionCls: type[BaseHTTPSConnection] = HTTPSConnection

    def __init__(
        self,
        host: str,
        port: int | None = None,
        timeout: _TYPE_TIMEOUT | None = _DEFAULT_TIMEOUT,
        maxsize: int = 1,
        block: bool = False,
        headers: typing.Mapping[str, str] | None = None,
        retries: Retry | bool | int | None = None,
        _proxy: Url | None = None,
        _proxy_headers: typing.Mapping[str, str] | None = None,
        key_file: str | None = None,
        cert_file: str | None = None,
        cert_reqs: int | str | None = None,
        key_password: str | None = None,
        ca_certs: str | None = None,
        ssl_version: int | str | None = None,
        ssl_minimum_version: ssl.TLSVersion | None = None,
        ssl_maximum_version: ssl.TLSVersion | None = None,
        assert_hostname: str | typing.Literal[False] | None = None,
        assert_fingerprint: str | None = None,
        ca_cert_dir: str | None = None,
        **conn_kw: typing.Any,
    ) -> None:
        # Generic pooling behaviour lives entirely in the base class; this
        # subclass only records the TLS-specific settings that are later fed
        # into each new connection by :meth:`_new_conn`.
        super().__init__(
            host,
            port,
            timeout,
            maxsize,
            block,
            headers,
            retries,
            _proxy,
            _proxy_headers,
            **conn_kw,
        )

        # Client certificate / private key material.
        self.key_file = key_file
        self.cert_file = cert_file
        self.key_password = key_password
        # Server certificate verification settings.
        self.cert_reqs = cert_reqs
        self.ca_certs = ca_certs
        self.ca_cert_dir = ca_cert_dir
        self.assert_hostname = assert_hostname
        self.assert_fingerprint = assert_fingerprint
        # TLS protocol version constraints.
        self.ssl_version = ssl_version
        self.ssl_minimum_version = ssl_minimum_version
        self.ssl_maximum_version = ssl_maximum_version

    def _prepare_proxy(self, conn: HTTPSConnection) -> None:  # type: ignore[override]
        """Establishes a tunnel connection through HTTP CONNECT."""
        # Tunnel over TLS only when the proxy itself is reached via HTTPS.
        tunnel_scheme = (
            "https" if self.proxy and self.proxy.scheme == "https" else "http"
        )

        conn.set_tunnel(
            scheme=tunnel_scheme,
            host=self._tunnel_host,
            port=self.port,
            headers=self.proxy_headers,
        )
        conn.connect()

    def _new_conn(self) -> BaseHTTPSConnection:
        """
        Return a fresh :class:`urllib3.connection.HTTPConnection`.
        """
        self.num_connections += 1
        log.debug(
            "Starting new HTTPS connection (%d): %s:%s",
            self.num_connections,
            self.host,
            self.port or "443",
        )

        if not self.ConnectionCls or self.ConnectionCls is DummyConnection:  # type: ignore[comparison-overlap]
            raise ImportError(
                "Can't connect to HTTPS URL because the SSL module is not available."
            )

        # When proxying, the TCP connection targets the proxy, not the origin.
        if self.proxy is not None and self.proxy.host is not None:
            connect_host: str = self.proxy.host
            connect_port = self.proxy.port
        else:
            connect_host = self.host
            connect_port = self.port

        return self.ConnectionCls(
            host=connect_host,
            port=connect_port,
            timeout=self.timeout.connect_timeout,
            cert_file=self.cert_file,
            key_file=self.key_file,
            key_password=self.key_password,
            cert_reqs=self.cert_reqs,
            ca_certs=self.ca_certs,
            ca_cert_dir=self.ca_cert_dir,
            assert_hostname=self.assert_hostname,
            assert_fingerprint=self.assert_fingerprint,
            ssl_version=self.ssl_version,
            ssl_minimum_version=self.ssl_minimum_version,
            ssl_maximum_version=self.ssl_maximum_version,
            **self.conn_kw,
        )

    def _validate_conn(self, conn: BaseHTTPConnection) -> None:
        """
        Called right before a request is made, after the socket is created.
        """
        super()._validate_conn(conn)

        # Force connect early to allow us to validate the connection.
        if conn.is_closed:
            conn.connect()

        # TODO revise this, see https://github.com/urllib3/urllib3/issues/2791
        if conn.is_verified or conn.proxy_is_verified:
            return
        warnings.warn(
            (
                f"Unverified HTTPS request is being made to host '{conn.host}'. "
                "Adding certificate verification is strongly advised. See: "
                "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
                "#tls-warnings"
            ),
            InsecureRequestWarning,
        )

1108 

1109 

def connection_from_url(url: str, **kw: typing.Any) -> HTTPConnectionPool:
    """
    Given a url, return an :class:`.ConnectionPool` instance of its host.

    This is a shortcut for not having to parse out the scheme, host, and port
    of the url before creating an :class:`.ConnectionPool` instance.

    :param url:
        Absolute URL string that must include the scheme. Port is optional.

    :param \\**kw:
        Passes additional parameters to the constructor of the appropriate
        :class:`.ConnectionPool`. Useful for specifying things like
        timeout, maxsize, headers, etc.

    Example::

        >>> conn = connection_from_url('http://google.com/')
        >>> r = conn.request('GET', '/')
    """
    parsed = parse_url(url)
    scheme = parsed.scheme or "http"
    port = parsed.port or port_by_scheme.get(scheme, 80)

    # Select the pool flavour from the (defaulted) scheme.
    pool_cls: type[HTTPConnectionPool] = (
        HTTPSConnectionPool if scheme == "https" else HTTPConnectionPool
    )
    return pool_cls(parsed.host, port=port, **kw)  # type: ignore[arg-type]

1137 

1138 

@typing.overload
def _normalize_host(host: None, scheme: str | None) -> None:
    ...


@typing.overload
def _normalize_host(host: str, scheme: str | None) -> str:
    ...


def _normalize_host(host: str | None, scheme: str | None) -> str | None:
    """
    Normalize hosts for comparisons and use with sockets.
    """
    normalized = normalize_host(host, scheme)

    # httplib doesn't like it when we include brackets in IPv6 addresses
    # Specifically, if we include brackets but also pass the port then
    # httplib crazily doubles up the square brackets on the Host header.
    # Instead, we need to make sure we never pass ``None`` as the port.
    # However, for backward compatibility reasons we can't actually
    # *assert* that. See http://bugs.python.org/issue28539
    if normalized and normalized.startswith("[") and normalized.endswith("]"):
        return normalized[1:-1]
    return normalized

1165 

1166 

def _url_from_pool(
    pool: HTTPConnectionPool | HTTPSConnectionPool, path: str | None = None
) -> str:
    """Returns the URL from a given connection pool. This is mainly used for testing and logging."""
    pool_url = Url(scheme=pool.scheme, host=pool.host, port=pool.port, path=path)
    return pool_url.url

1172 

1173 

1174def _close_pool_connections(pool: queue.LifoQueue[typing.Any]) -> None: 

1175 """Drains a queue of connections and closes each one.""" 

1176 try: 

1177 while True: 

1178 conn = pool.get(block=False) 

1179 if conn: 

1180 conn.close() 

1181 except queue.Empty: 

1182 pass # Done.