connectionpool.py

from __future__ import annotations

import errno
import logging
import queue
import sys
import typing
import warnings
import weakref
from socket import timeout as SocketTimeout
from types import TracebackType

from ._base_connection import _TYPE_BODY
from ._collections import HTTPHeaderDict
from ._request_methods import RequestMethods
from .connection import (
    BaseSSLError,
    BrokenPipeError,
    DummyConnection,
    HTTPConnection,
    HTTPException,
    HTTPSConnection,
    ProxyConfig,
    _wrap_proxy_error,
)
from .connection import port_by_scheme as port_by_scheme
from .exceptions import (
    ClosedPoolError,
    EmptyPoolError,
    FullPoolError,
    HostChangedError,
    InsecureRequestWarning,
    LocationValueError,
    MaxRetryError,
    NewConnectionError,
    ProtocolError,
    ProxyError,
    ReadTimeoutError,
    SSLError,
    TimeoutError,
)
from .response import BaseHTTPResponse
from .util.connection import is_connection_dropped
from .util.proxy import connection_requires_http_tunnel
from .util.request import _TYPE_BODY_POSITION, set_file_position
from .util.retry import Retry
from .util.ssl_match_hostname import CertificateError
from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_DEFAULT, Timeout
from .util.url import Url, _encode_target
from .util.url import _normalize_host as normalize_host
from .util.url import parse_url
from .util.util import to_str

if typing.TYPE_CHECKING:
    import ssl

    from typing_extensions import Self

    from ._base_connection import BaseHTTPConnection, BaseHTTPSConnection

log = logging.getLogger(__name__)

_TYPE_TIMEOUT = typing.Union[Timeout, float, _TYPE_DEFAULT, None]


# Pool objects
class ConnectionPool:
    """
    Base class for all connection pools, such as
    :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.

    .. note::
       ConnectionPool.urlopen() does not normalize or percent-encode target URIs
       which is useful if your target server doesn't support percent-encoded
       target URIs.
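
    Pools are context managers: leaving the ``with`` block calls :meth:`close`
    and discards any pooled connections. A minimal sketch, using ``example.com``
    purely as a placeholder host::

        >>> from urllib3 import HTTPConnectionPool
        >>> with HTTPConnectionPool("example.com") as pool:
        ...     response = pool.request("GET", "/")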
    """

    scheme: str | None = None
    QueueCls = queue.LifoQueue

    def __init__(self, host: str, port: int | None = None) -> None:
        if not host:
            raise LocationValueError("No host specified.")

        self.host = _normalize_host(host, scheme=self.scheme)
        self.port = port

        # This property uses 'normalize_host()' (not '_normalize_host()')
        # to avoid removing square braces around IPv6 addresses.
        # This value is sent to `HTTPConnection.set_tunnel()` if called
        # because square braces are required for HTTP CONNECT tunneling.
        self._tunnel_host = normalize_host(host, scheme=self.scheme).lower()

    def __str__(self) -> str:
        return f"{type(self).__name__}(host={self.host!r}, port={self.port!r})"

    def __enter__(self) -> Self:
        return self

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_val: BaseException | None,
        exc_tb: TracebackType | None,
    ) -> typing.Literal[False]:
        self.close()
        # Return False to re-raise any potential exceptions
        return False

    def close(self) -> None:
        """
        Close all pooled connections and disable the pool.
        """


# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK}


class HTTPConnectionPool(ConnectionPool, RequestMethods):
    """
    Thread-safe connection pool for one host.

    :param host:
        Host used for this HTTP Connection (e.g. "localhost"), passed into
        :class:`http.client.HTTPConnection`.

    :param port:
        Port used for this HTTP Connection (None is equivalent to 80), passed
        into :class:`http.client.HTTPConnection`.

    :param timeout:
        Socket timeout in seconds for each individual connection. This can
        be a float or integer, which sets the timeout for the HTTP request,
        or an instance of :class:`urllib3.util.Timeout` which gives you more
        fine-grained control over request timeouts. After the constructor has
        been parsed, this is always a `urllib3.util.Timeout` object.

    :param maxsize:
        Number of connections to save that can be reused. More than 1 is useful
        in multithreaded situations. If ``block`` is set to False, more
        connections will be created but they will not be saved once they've
        been used.

    :param block:
        If set to True, no more than ``maxsize`` connections will be used at
        a time. When no free connections are available, the call will block
        until a connection has been released. This is a useful side effect for
        particular multithreaded situations where one does not want to use more
        than maxsize connections per host to prevent flooding.

    :param headers:
        Headers to include with all requests, unless other headers are given
        explicitly.

    :param retries:
        Retry configuration to use by default with requests in this pool.

    :param _proxy:
        Parsed proxy URL, should not be used directly, instead, see
        :class:`urllib3.ProxyManager`

    :param _proxy_headers:
        A dictionary with proxy headers, should not be used directly,
        instead, see :class:`urllib3.ProxyManager`

    :param \\**conn_kw:
        Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
        :class:`urllib3.connection.HTTPSConnection` instances.
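
    A short usage sketch; ``example.com`` stands in for any reachable HTTP host::

        >>> from urllib3 import HTTPConnectionPool
        >>> pool = HTTPConnectionPool("example.com", maxsize=2, block=True)
        >>> response = pool.request("GET", "/")
        >>> response.status  # doctest: +SKIP
        200
        >>> pool.close()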
    """

    scheme = "http"
    ConnectionCls: type[BaseHTTPConnection] | type[BaseHTTPSConnection] = HTTPConnection

    def __init__(
        self,
        host: str,
        port: int | None = None,
        timeout: _TYPE_TIMEOUT | None = _DEFAULT_TIMEOUT,
        maxsize: int = 1,
        block: bool = False,
        headers: typing.Mapping[str, str] | None = None,
        retries: Retry | bool | int | None = None,
        _proxy: Url | None = None,
        _proxy_headers: typing.Mapping[str, str] | None = None,
        _proxy_config: ProxyConfig | None = None,
        **conn_kw: typing.Any,
    ):
        ConnectionPool.__init__(self, host, port)
        RequestMethods.__init__(self, headers)

        if not isinstance(timeout, Timeout):
            timeout = Timeout.from_float(timeout)

        if retries is None:
            retries = Retry.DEFAULT

        self.timeout = timeout
        self.retries = retries

        self.pool: queue.LifoQueue[typing.Any] | None = self.QueueCls(maxsize)
        self.block = block

        self.proxy = _proxy
        self.proxy_headers = _proxy_headers or {}
        self.proxy_config = _proxy_config

        # Fill the queue up so that doing get() on it will block properly
        for _ in range(maxsize):
            self.pool.put(None)

        # These are mostly for testing and debugging purposes.
        self.num_connections = 0
        self.num_requests = 0
        self.conn_kw = conn_kw

        if self.proxy:
            # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
            # We cannot know if the user has added default socket options, so we cannot replace the
            # list.
            self.conn_kw.setdefault("socket_options", [])

            self.conn_kw["proxy"] = self.proxy
            self.conn_kw["proxy_config"] = self.proxy_config

        # Do not pass 'self' as the callback argument to 'finalize': the
        # finalizer would then hold a strong reference to self forever (a leak).
        # Passing only a reference to the pool lets the garbage collector
        # free self once nobody else holds a reference to it.
        pool = self.pool

        # Close all the HTTPConnections in the pool before the
        # HTTPConnectionPool object is garbage collected.
        weakref.finalize(self, _close_pool_connections, pool)

    def _new_conn(self) -> BaseHTTPConnection:
        """
        Return a fresh :class:`HTTPConnection`.
        """
        self.num_connections += 1
        log.debug(
            "Starting new HTTP connection (%d): %s:%s",
            self.num_connections,
            self.host,
            self.port or "80",
        )

        conn = self.ConnectionCls(
            host=self.host,
            port=self.port,
            timeout=self.timeout.connect_timeout,
            **self.conn_kw,
        )
        return conn

    def _get_conn(self, timeout: float | None = None) -> BaseHTTPConnection:
        """
        Get a connection. Will return a pooled connection if one is available.

        If no connections are available and :prop:`.block` is ``False``, then a
        fresh connection is returned.

        :param timeout:
            Seconds to wait before giving up and raising
            :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
            :prop:`.block` is ``True``.
        """
        conn = None

        if self.pool is None:
            raise ClosedPoolError(self, "Pool is closed.")

        try:
            conn = self.pool.get(block=self.block, timeout=timeout)

        except AttributeError:  # self.pool is None
            # Defensive: close() may set self.pool to None between the check
            # above and the get() call.
            raise ClosedPoolError(self, "Pool is closed.") from None

        except queue.Empty:
            if self.block:
                raise EmptyPoolError(
                    self,
                    "Pool is empty and a new connection can't be opened due to blocking mode.",
                ) from None
            pass  # Oh well, we'll create a new connection then

        # If this is a persistent connection, check if it got disconnected
        if conn and is_connection_dropped(conn):
            log.debug("Resetting dropped connection: %s", self.host)
            conn.close()

        return conn or self._new_conn()

    def _put_conn(self, conn: BaseHTTPConnection | None) -> None:
        """
        Put a connection back into the pool.

        :param conn:
            Connection object for the current host and port as returned by
            :meth:`._new_conn` or :meth:`._get_conn`.

        If the pool is already full, the connection is closed and discarded
        because we exceeded maxsize. If connections are discarded frequently,
        then maxsize should be increased.

        If the pool is closed, then the connection will be closed and discarded.
        """
        if self.pool is not None:
            try:
                self.pool.put(conn, block=False)
                return  # Everything is dandy, done.
            except AttributeError:
                # self.pool is None.
                pass
            except queue.Full:
                # Connection never got put back into the pool, close it.
                if conn:
                    conn.close()

                if self.block:
                    # This should never happen if you got the conn from self._get_conn
                    raise FullPoolError(
                        self,
                        "Pool reached maximum size and no more connections are allowed.",
                    ) from None

                log.warning(
                    "Connection pool is full, discarding connection: %s. Connection pool size: %s",
                    self.host,
                    self.pool.qsize(),
                )

        # Connection never got put back into the pool, close it.
        if conn:
            conn.close()

    def _validate_conn(self, conn: BaseHTTPConnection) -> None:
        """
        Called right before a request is made, after the socket is created.
        """

    def _prepare_proxy(self, conn: BaseHTTPConnection) -> None:
        # Nothing to do for HTTP connections.
        pass

    def _get_timeout(self, timeout: _TYPE_TIMEOUT) -> Timeout:
        """Helper that always returns a :class:`urllib3.util.Timeout`"""
        if timeout is _DEFAULT_TIMEOUT:
            return self.timeout.clone()

        if isinstance(timeout, Timeout):
            return timeout.clone()
        else:
            # User passed us an int/float. This is for backwards compatibility,
            # can be removed later
            return Timeout.from_float(timeout)

    def _raise_timeout(
        self,
        err: BaseSSLError | OSError | SocketTimeout,
        url: str,
        timeout_value: _TYPE_TIMEOUT | None,
    ) -> None:
        """Is the error actually a timeout? Will raise a ReadTimeout or pass"""
        if isinstance(err, SocketTimeout):
            raise ReadTimeoutError(
                self, url, f"Read timed out. (read timeout={timeout_value})"
            ) from err

        # See the comment about EAGAIN in Python 3 in _make_request(): treat
        # blocking-IO errnos as read timeouts.
        if hasattr(err, "errno") and err.errno in _blocking_errnos:
            raise ReadTimeoutError(
                self, url, f"Read timed out. (read timeout={timeout_value})"
            ) from err

    def _make_request(
        self,
        conn: BaseHTTPConnection,
        method: str,
        url: str,
        body: _TYPE_BODY | None = None,
        headers: typing.Mapping[str, str] | None = None,
        retries: Retry | None = None,
        timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
        chunked: bool = False,
        response_conn: BaseHTTPConnection | None = None,
        preload_content: bool = True,
        decode_content: bool = True,
        enforce_content_length: bool = True,
    ) -> BaseHTTPResponse:
        """
        Perform a request on a given urllib connection object taken from our
        pool.

        :param conn:
            a connection from one of our connection pools

        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)

        :param url:
            The URL to perform the request on.

        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.

        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.

        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.

            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.

            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.

        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.

        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.

        :param chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.

        :param response_conn:
            Set this to ``None`` if you will handle releasing the connection or
            set the connection to have the response release it.

        :param preload_content:
            If True, the response's body will be preloaded during construction.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param enforce_content_length:
            Enforce content length checking. Body returned by server must match
            value of Content-Length header, if present. Otherwise, raise error.
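
        A rough sketch of how :meth:`urlopen` drives this method; it is rarely
        useful to call it directly (``example.com`` is a placeholder host)::

            >>> from urllib3 import HTTPConnectionPool
            >>> pool = HTTPConnectionPool("example.com")
            >>> conn = pool._get_conn()
            >>> response = pool._make_request(conn, "GET", "/", response_conn=None)
            >>> pool._put_conn(conn)  # hand the connection back when done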
        """
        self.num_requests += 1

        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = Timeout.resolve_default_timeout(timeout_obj.connect_timeout)

        try:
            # Trigger any extra validation we need to do.
            try:
                self._validate_conn(conn)
            except (SocketTimeout, BaseSSLError) as e:
                self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
                raise

        # _validate_conn() starts the connection to an HTTPS proxy
        # so we need to wrap errors with 'ProxyError' here too.
        except (
            OSError,
            NewConnectionError,
            TimeoutError,
            BaseSSLError,
            CertificateError,
            SSLError,
        ) as e:
            new_e: Exception = e
            if isinstance(e, (BaseSSLError, CertificateError)):
                new_e = SSLError(e)
            # If the connection didn't successfully connect to its proxy,
            # the failure belongs to the proxy, so wrap it as a proxy error.
            if isinstance(
                new_e, (OSError, NewConnectionError, TimeoutError, SSLError)
            ) and (conn and conn.proxy and not conn.has_connected_to_proxy):
                new_e = _wrap_proxy_error(new_e, conn.proxy.scheme)
            raise new_e

        # conn.request() calls http.client.*.request, not the method in
        # urllib3.request. It also calls makefile (recv) on the socket.
        try:
            conn.request(
                method,
                url,
                body=body,
                headers=headers,
                chunked=chunked,
                preload_content=preload_content,
                decode_content=decode_content,
                enforce_content_length=enforce_content_length,
            )

        # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
        # legitimately able to close the connection after sending a valid response.
        # With this behaviour, the received response is still readable.
        except BrokenPipeError:
            pass
        except OSError as e:
            # MacOS/Linux
            # EPROTOTYPE and ECONNRESET are needed on macOS
            # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
            # Condition changed later to emit ECONNRESET instead of only EPROTOTYPE.
            if e.errno != errno.EPROTOTYPE and e.errno != errno.ECONNRESET:
                raise

        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout

        if not conn.is_closed:
            # In Python 3 socket.py will catch EAGAIN and return None when you
            # try and read into the file pointer created by http.client, which
            # instead raises a BadStatusLine exception. Instead of catching
            # the exception and assuming all BadStatusLine exceptions are read
            # timeouts, check for a zero timeout before making the request.
            if read_timeout == 0:
                raise ReadTimeoutError(
                    self, url, f"Read timed out. (read timeout={read_timeout})"
                )
            conn.timeout = read_timeout

        # Receive the response from the server
        try:
            response = conn.getresponse()
        except (BaseSSLError, OSError) as e:
            self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
            raise

        # Set properties that are used by the pooling layer.
        response.retries = retries
        response._connection = response_conn  # type: ignore[attr-defined]
        response._pool = self  # type: ignore[attr-defined]

        log.debug(
            '%s://%s:%s "%s %s %s" %s %s',
            self.scheme,
            self.host,
            self.port,
            method,
            url,
            response.version_string,
            response.status,
            response.length_remaining,
        )

        return response

    def close(self) -> None:
        """
        Close all pooled connections and disable the pool.
        """
        if self.pool is None:
            return
        # Disable access to the pool
        old_pool, self.pool = self.pool, None

        # Close all the HTTPConnections in the pool.
        _close_pool_connections(old_pool)

    def is_same_host(self, url: str) -> bool:
        """
        Check if the given ``url`` is a member of the same host as this
        connection pool.
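
        A small sketch of the comparison, with ``example.com`` as a placeholder
        host (a URL carrying the scheme's default port compares equal to one
        with no port)::

            >>> from urllib3 import HTTPConnectionPool
            >>> pool = HTTPConnectionPool("example.com")
            >>> pool.is_same_host("http://example.com:80/index.html")
            True
            >>> pool.is_same_host("https://example.com/")
            False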
        """
        if url.startswith("/"):
            return True

        # TODO: Add optional support for socket.gethostbyname checking.
        scheme, _, host, port, *_ = parse_url(url)
        scheme = scheme or "http"
        if host is not None:
            host = _normalize_host(host, scheme=scheme)

        # Use explicit default port for comparison when none is given
        if self.port and not port:
            port = port_by_scheme.get(scheme)
        elif not self.port and port == port_by_scheme.get(scheme):
            port = None

        return (scheme, host, port) == (self.scheme, self.host, self.port)

    def urlopen(  # type: ignore[override]
        self,
        method: str,
        url: str,
        body: _TYPE_BODY | None = None,
        headers: typing.Mapping[str, str] | None = None,
        retries: Retry | bool | int | None = None,
        redirect: bool = True,
        assert_same_host: bool = True,
        timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
        pool_timeout: int | None = None,
        release_conn: bool | None = None,
        chunked: bool = False,
        body_pos: _TYPE_BODY_POSITION | None = None,
        preload_content: bool = True,
        decode_content: bool = True,
        **response_kw: typing.Any,
    ) -> BaseHTTPResponse:
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.

        .. note::

           More commonly, it's appropriate to use a convenience method
           such as :meth:`request`.

        .. note::

           `release_conn` will only behave as expected if
           `preload_content=False` because we want to make
           `preload_content=False` the default behaviour someday soon without
           breaking backwards compatibility.

        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)

        :param url:
            The URL to perform the request on.

        :param body:
            Data to send in the request body, either :class:`str`, :class:`bytes`,
            an iterable of :class:`str`/:class:`bytes`, or a file-like object.

        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.

        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.

            If ``None`` (default) will retry 3 times, see ``Retry.DEFAULT``. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.

            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.

        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.

        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.

        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When ``False``, you can
            use the pool on an HTTP proxy and request foreign hosts.

        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.

        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.

        :param bool preload_content:
            If True, the response's body will be preloaded into memory.

        :param bool decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of ``preload_content``
            which defaults to ``True``.

        :param bool chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.

        :param int body_pos:
            Position to seek to in file-like body in the event of a retry or
            redirect. Typically this won't need to be set because urllib3 will
            auto-populate the value when needed.
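
        A low-level usage sketch (``example.com`` is a placeholder host);
        :meth:`request` is usually the more convenient entry point::

            >>> from urllib3 import HTTPConnectionPool
            >>> from urllib3.util.retry import Retry
            >>> pool = HTTPConnectionPool("example.com")
            >>> response = pool.urlopen(
            ...     "GET",
            ...     "/",
            ...     retries=Retry(total=3, redirect=2),
            ...     preload_content=False,
            ... )
            >>> data = response.read()
            >>> response.release_conn()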
        """
        parsed_url = parse_url(url)
        destination_scheme = parsed_url.scheme

        if headers is None:
            headers = self.headers

        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)

        if release_conn is None:
            release_conn = preload_content

        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)

        # Ensure that the URL we're connecting to is properly encoded
        if url.startswith("/"):
            url = to_str(_encode_target(url))
        else:
            url = to_str(parsed_url.url)

        conn = None

        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/urllib3/urllib3/issues/651>
        release_this_conn = release_conn

        http_tunnel_required = connection_requires_http_tunnel(
            self.proxy, self.proxy_config, destination_scheme
        )

        # Merge the proxy headers. Only done when not using HTTP CONNECT. We
        # have to copy the headers dict so we can safely change it without those
        # changes being reflected in anyone else's copy.
        if not http_tunnel_required:
            headers = headers.copy()  # type: ignore[attr-defined]
            headers.update(self.proxy_headers)  # type: ignore[union-attr]

        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None

        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False

        # Rewind body position, if needed. Record current position
        # for future rewinds in the event of a redirect/retry.
        body_pos = set_file_position(body, body_pos)

        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)

            conn.timeout = timeout_obj.connect_timeout  # type: ignore[assignment]

            # Is this a closed/new connection that requires CONNECT tunnelling?
            if self.proxy is not None and http_tunnel_required and conn.is_closed:
                try:
                    self._prepare_proxy(conn)
                except (BaseSSLError, OSError, SocketTimeout) as e:
                    self._raise_timeout(
                        err=e, url=self.proxy.url, timeout_value=conn.timeout
                    )
                    raise

            # If we're going to release the connection in ``finally:``, then
            # the response doesn't need to know about the connection. Otherwise
            # it will also try to release it and we'll have a double-release
            # mess.
            response_conn = conn if not release_conn else None

            # Make the request on the HTTPConnection object
            response = self._make_request(
                conn,
                method,
                url,
                timeout=timeout_obj,
                body=body,
                headers=headers,
                chunked=chunked,
                retries=retries,
                response_conn=response_conn,
                preload_content=preload_content,
                decode_content=decode_content,
                **response_kw,
            )

            # Everything went great!
            clean_exit = True

        except EmptyPoolError:
            # Didn't get a connection from the pool, no need to clean up
            clean_exit = True
            release_this_conn = False
            raise

        except (
            TimeoutError,
            HTTPException,
            OSError,
            ProtocolError,
            BaseSSLError,
            SSLError,
            CertificateError,
            ProxyError,
        ) as e:
            # Discard the connection for these exceptions. It will be
            # replaced during the next _get_conn() call.
            clean_exit = False
            new_e: Exception = e
            if isinstance(e, (BaseSSLError, CertificateError)):
                new_e = SSLError(e)
            if isinstance(
                new_e,
                (
                    OSError,
                    NewConnectionError,
                    TimeoutError,
                    SSLError,
                    HTTPException,
                ),
            ) and (conn and conn.proxy and not conn.has_connected_to_proxy):
                new_e = _wrap_proxy_error(new_e, conn.proxy.scheme)
            elif isinstance(new_e, (OSError, HTTPException)):
                new_e = ProtocolError("Connection aborted.", new_e)

            retries = retries.increment(
                method, url, error=new_e, _pool=self, _stacktrace=sys.exc_info()[2]
            )
            retries.sleep()

            # Keep track of the error for the retry warning.
            err = e

        finally:
            if not clean_exit:
                # We hit some kind of exception, handled or otherwise. We need
                # to throw the connection away unless explicitly told not to.
                # Close the connection, set the variable to None, and make sure
                # we put the None back in the pool to avoid leaking it.
                if conn:
                    conn.close()
                    conn = None
                release_this_conn = True

            if release_this_conn:
                # Put the connection back to be reused. If the connection is
                # expired then it will be None, which will get replaced with a
                # fresh connection during _get_conn.
                self._put_conn(conn)

        if not conn:
            # Try again
            log.warning(
                "Retrying (%r) after connection broken by '%r': %s", retries, err, url
            )
            return self.urlopen(
                method,
                url,
                body,
                headers,
                retries,
                redirect,
                assert_same_host,
                timeout=timeout,
                pool_timeout=pool_timeout,
                release_conn=release_conn,
                chunked=chunked,
                body_pos=body_pos,
                preload_content=preload_content,
                decode_content=decode_content,
                **response_kw,
            )

        # Handle redirect?
        redirect_location = redirect and response.get_redirect_location()
        if redirect_location:
            if response.status == 303:
                # Change the method according to RFC 9110, Section 15.4.4.
                method = "GET"
                # And drop the body so we don't forward anything sensitive.
                body = None
                headers = HTTPHeaderDict(headers)._prepare_for_method_change()

            try:
                retries = retries.increment(method, url, response=response, _pool=self)
            except MaxRetryError:
                if retries.raise_on_redirect:
                    response.drain_conn()
                    raise
                return response

            response.drain_conn()
            retries.sleep_for_retry(response)
            log.debug("Redirecting %s -> %s", url, redirect_location)
            return self.urlopen(
                method,
                redirect_location,
                body,
                headers,
                retries=retries,
                redirect=redirect,
                assert_same_host=assert_same_host,
                timeout=timeout,
                pool_timeout=pool_timeout,
                release_conn=release_conn,
                chunked=chunked,
                body_pos=body_pos,
                preload_content=preload_content,
                decode_content=decode_content,
                **response_kw,
            )

        # Check if we should retry the HTTP response.
        has_retry_after = bool(response.headers.get("Retry-After"))
        if retries.is_retry(method, response.status, has_retry_after):
            try:
                retries = retries.increment(method, url, response=response, _pool=self)
            except MaxRetryError:
                if retries.raise_on_status:
                    response.drain_conn()
                    raise
                return response

            response.drain_conn()
            retries.sleep(response)
            log.debug("Retry: %s", url)
            return self.urlopen(
                method,
                url,
                body,
                headers,
                retries=retries,
                redirect=redirect,
                assert_same_host=assert_same_host,
                timeout=timeout,
                pool_timeout=pool_timeout,
                release_conn=release_conn,
                chunked=chunked,
                body_pos=body_pos,
                preload_content=preload_content,
                decode_content=decode_content,
                **response_kw,
            )

        return response


class HTTPSConnectionPool(HTTPConnectionPool):
    """
    Same as :class:`.HTTPConnectionPool`, but HTTPS.

    :class:`.HTTPSConnection` uses one of ``assert_fingerprint``,
    ``assert_hostname`` and ``host`` in this order to verify connections.
    If ``assert_hostname`` is False, no verification is done.

    The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
    ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl`
    is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
    the connection socket into an SSL socket.
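
    A short sketch with certificate verification enabled; the host and the CA
    bundle path are placeholders::

        >>> from urllib3 import HTTPSConnectionPool
        >>> pool = HTTPSConnectionPool(
        ...     "example.com",
        ...     port=443,
        ...     cert_reqs="CERT_REQUIRED",
        ...     ca_certs="/path/to/ca_bundle.pem",
        ... )
        >>> response = pool.request("GET", "/")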
    """

    scheme = "https"
    ConnectionCls: type[BaseHTTPSConnection] = HTTPSConnection

    def __init__(
        self,
        host: str,
        port: int | None = None,
        timeout: _TYPE_TIMEOUT | None = _DEFAULT_TIMEOUT,
        maxsize: int = 1,
        block: bool = False,
        headers: typing.Mapping[str, str] | None = None,
        retries: Retry | bool | int | None = None,
        _proxy: Url | None = None,
        _proxy_headers: typing.Mapping[str, str] | None = None,
        key_file: str | None = None,
        cert_file: str | None = None,
        cert_reqs: int | str | None = None,
        key_password: str | None = None,
        ca_certs: str | None = None,
        ssl_version: int | str | None = None,
        ssl_minimum_version: ssl.TLSVersion | None = None,
        ssl_maximum_version: ssl.TLSVersion | None = None,
        assert_hostname: str | typing.Literal[False] | None = None,
        assert_fingerprint: str | None = None,
        ca_cert_dir: str | None = None,
        **conn_kw: typing.Any,
    ) -> None:
        super().__init__(
            host,
            port,
            timeout,
            maxsize,
            block,
            headers,
            retries,
            _proxy,
            _proxy_headers,
            **conn_kw,
        )

        self.key_file = key_file
        self.cert_file = cert_file
        self.cert_reqs = cert_reqs
        self.key_password = key_password
        self.ca_certs = ca_certs
        self.ca_cert_dir = ca_cert_dir
        self.ssl_version = ssl_version
        self.ssl_minimum_version = ssl_minimum_version
        self.ssl_maximum_version = ssl_maximum_version
        self.assert_hostname = assert_hostname
        self.assert_fingerprint = assert_fingerprint

    def _prepare_proxy(self, conn: HTTPSConnection) -> None:  # type: ignore[override]
        """Establishes a tunnel connection through HTTP CONNECT."""
        if self.proxy and self.proxy.scheme == "https":
            tunnel_scheme = "https"
        else:
            tunnel_scheme = "http"

        conn.set_tunnel(
            scheme=tunnel_scheme,
            host=self._tunnel_host,
            port=self.port,
            headers=self.proxy_headers,
        )
        conn.connect()

    def _new_conn(self) -> BaseHTTPSConnection:
        """
        Return a fresh :class:`urllib3.connection.HTTPSConnection`.
        """
        self.num_connections += 1
        log.debug(
            "Starting new HTTPS connection (%d): %s:%s",
            self.num_connections,
            self.host,
            self.port or "443",
        )

        if not self.ConnectionCls or self.ConnectionCls is DummyConnection:  # type: ignore[comparison-overlap]
            raise ImportError(
                "Can't connect to HTTPS URL because the SSL module is not available."
            )

        actual_host: str = self.host
        actual_port = self.port
        if self.proxy is not None and self.proxy.host is not None:
            actual_host = self.proxy.host
            actual_port = self.proxy.port

        return self.ConnectionCls(
            host=actual_host,
            port=actual_port,
            timeout=self.timeout.connect_timeout,
            cert_file=self.cert_file,
            key_file=self.key_file,
            key_password=self.key_password,
            cert_reqs=self.cert_reqs,
            ca_certs=self.ca_certs,
            ca_cert_dir=self.ca_cert_dir,
            assert_hostname=self.assert_hostname,
            assert_fingerprint=self.assert_fingerprint,
            ssl_version=self.ssl_version,
            ssl_minimum_version=self.ssl_minimum_version,
            ssl_maximum_version=self.ssl_maximum_version,
            **self.conn_kw,
        )

    def _validate_conn(self, conn: BaseHTTPConnection) -> None:
        """
        Called right before a request is made, after the socket is created.
        """
        super()._validate_conn(conn)

        # Force connect early to allow us to validate the connection.
        if conn.is_closed:
            conn.connect()

        # TODO revise this, see https://github.com/urllib3/urllib3/issues/2791
        if not conn.is_verified and not conn.proxy_is_verified:
            warnings.warn(
                (
                    f"Unverified HTTPS request is being made to host '{conn.host}'. "
                    "Adding certificate verification is strongly advised. See: "
                    "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
                    "#tls-warnings"
                ),
                InsecureRequestWarning,
            )


def connection_from_url(url: str, **kw: typing.Any) -> HTTPConnectionPool:
    """
    Given a url, return a :class:`.ConnectionPool` instance of its host.

    This is a shortcut for not having to parse out the scheme, host, and port
    of the url before creating a :class:`.ConnectionPool` instance.

    :param url:
        Absolute URL string that must include the scheme. Port is optional.

    :param \\**kw:
        Passes additional parameters to the constructor of the appropriate
        :class:`.ConnectionPool`. Useful for specifying things like
        timeout, maxsize, headers, etc.

    Example::

        >>> conn = connection_from_url('http://google.com/')
        >>> r = conn.request('GET', '/')
    """
    scheme, _, host, port, *_ = parse_url(url)
    scheme = scheme or "http"
    port = port or port_by_scheme.get(scheme, 80)
    if scheme == "https":
        return HTTPSConnectionPool(host, port=port, **kw)  # type: ignore[arg-type]
    else:
        return HTTPConnectionPool(host, port=port, **kw)  # type: ignore[arg-type]


@typing.overload
def _normalize_host(host: None, scheme: str | None) -> None: ...


@typing.overload
def _normalize_host(host: str, scheme: str | None) -> str: ...


def _normalize_host(host: str | None, scheme: str | None) -> str | None:
    """
    Normalize hosts for comparisons and use with sockets.
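
    A small sketch of the bracket handling (the IPv6 literal is illustrative)::

        >>> from urllib3.connectionpool import _normalize_host
        >>> _normalize_host("[2001:db8::1]", scheme="http")
        '2001:db8::1'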
    """
    host = normalize_host(host, scheme)
    # httplib doesn't like it when we include brackets in IPv6 addresses
    # Specifically, if we include brackets but also pass the port then
    # httplib crazily doubles up the square brackets on the Host header.
    # Instead, we need to make sure we never pass ``None`` as the port.
    # However, for backward compatibility reasons we can't actually
    # *assert* that. See http://bugs.python.org/issue28539
    if host and host.startswith("[") and host.endswith("]"):
        host = host[1:-1]
    return host


def _url_from_pool(
    pool: HTTPConnectionPool | HTTPSConnectionPool, path: str | None = None
) -> str:
    """Returns the URL from a given connection pool. This is mainly used for testing and logging."""
    return Url(scheme=pool.scheme, host=pool.host, port=pool.port, path=path).url


def _close_pool_connections(pool: queue.LifoQueue[typing.Any]) -> None:
    """Drains a queue of connections and closes each one."""
    try:
        while True:
            conn = pool.get(block=False)
            if conn:
                conn.close()
    except queue.Empty:
        pass  # Done.