from __future__ import annotations

import datetime
import logging
import os
import re
import socket
import sys
import typing
import warnings
from http.client import HTTPConnection as _HTTPConnection
from http.client import HTTPException as HTTPException  # noqa: F401
from http.client import ResponseNotReady
from socket import timeout as SocketTimeout

if typing.TYPE_CHECKING:
    # Imports used only in annotations; guarded to avoid import cycles at runtime.
    from typing import Literal

    from .response import HTTPResponse
    from .util.ssl_ import _TYPE_PEER_CERT_RET_DICT
    from .util.ssltransport import SSLTransport

from ._collections import HTTPHeaderDict
from .util.response import assert_header_parsing
from .util.timeout import _DEFAULT_TIMEOUT, _TYPE_TIMEOUT, Timeout
from .util.util import to_str
from .util.wait import wait_for_read

try:  # Compiled with SSL?
    import ssl

    BaseSSLError = ssl.SSLError
except (ImportError, AttributeError):
    # Python was built without SSL support; provide a stand-in so that
    # `except BaseSSLError:` clauses elsewhere remain syntactically valid.
    ssl = None  # type: ignore[assignment]

    class BaseSSLError(BaseException):  # type: ignore[no-redef]
        pass


from ._base_connection import _TYPE_BODY
from ._base_connection import ProxyConfig as ProxyConfig
from ._base_connection import _ResponseOptions as _ResponseOptions
from ._version import __version__
from .exceptions import (
    ConnectTimeoutError,
    HeaderParsingError,
    NameResolutionError,
    NewConnectionError,
    ProxyError,
    SystemTimeWarning,
)
from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection, ssl_
from .util.request import body_to_chunks
from .util.ssl_ import assert_fingerprint as _assert_fingerprint
from .util.ssl_ import (
    create_urllib3_context,
    is_ipaddress,
    resolve_cert_reqs,
    resolve_ssl_version,
    ssl_wrap_socket,
)
from .util.ssl_match_hostname import CertificateError, match_hostname
from .util.url import Url

# Not a no-op, we're adding this to the namespace so it can be imported.
ConnectionError = ConnectionError
BrokenPipeError = BrokenPipeError


log = logging.getLogger(__name__)

# Default ports per URL scheme, used when a URL omits an explicit port.
port_by_scheme = {"http": 80, "https": 443}

# When it comes time to update this value as a part of regular maintenance
# (ie test_recent_date is failing) update it to ~6 months before the current date.
RECENT_DATE = datetime.date(2023, 6, 1)

# Matches any character that is NOT a valid HTTP token character (RFC 9110);
# used to reject invalid HTTP methods in putrequest().
_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")

_HAS_SYS_AUDIT = hasattr(sys, "audit")


class HTTPConnection(_HTTPConnection):
    """
    Based on :class:`http.client.HTTPConnection` but provides an extra constructor
    backwards-compatibility layer between older and newer Pythons.

    Additional keyword parameters are used to configure attributes of the connection.
    Accepted parameters include:

    - ``source_address``: Set the source address for the current connection.
    - ``socket_options``: Set specific options on the underlying socket. If not specified, then
      defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
      Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.

      For example, if you wish to enable TCP Keep Alive in addition to the defaults,
      you might pass:

      .. code-block:: python

          HTTPConnection.default_socket_options + [
              (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
          ]

      Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
    """

    default_port: typing.ClassVar[int] = port_by_scheme["http"]  # type: ignore[misc]

    #: Disable Nagle's algorithm by default.
    #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
    default_socket_options: typing.ClassVar[connection._TYPE_SOCKET_OPTIONS] = [
        (socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
    ]

    #: Whether this connection verifies the host's certificate.
    is_verified: bool = False

    #: Whether this proxy connection verified the proxy host's certificate.
    # If no proxy is currently connected to the value will be ``None``.
    proxy_is_verified: bool | None = None

    blocksize: int
    source_address: tuple[str, int] | None
    socket_options: connection._TYPE_SOCKET_OPTIONS | None

    # Internal per-request / per-connection state; reset in close().
    _has_connected_to_proxy: bool
    _response_options: _ResponseOptions | None
    _tunnel_host: str | None
    _tunnel_port: int | None
    _tunnel_scheme: str | None

    def __init__(
        self,
        host: str,
        port: int | None = None,
        *,
        timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
        source_address: tuple[str, int] | None = None,
        blocksize: int = 16384,
        socket_options: None
        | (connection._TYPE_SOCKET_OPTIONS) = default_socket_options,
        proxy: Url | None = None,
        proxy_config: ProxyConfig | None = None,
    ) -> None:
        super().__init__(
            host=host,
            port=port,
            timeout=Timeout.resolve_default_timeout(timeout),
            source_address=source_address,
            blocksize=blocksize,
        )
        self.socket_options = socket_options
        self.proxy = proxy
        self.proxy_config = proxy_config

        self._has_connected_to_proxy = False
        self._response_options = None
        self._tunnel_host: str | None = None
        self._tunnel_port: int | None = None
        self._tunnel_scheme: str | None = None

    @property
    def host(self) -> str:
        """
        Getter method to remove any trailing dots that indicate the hostname is an FQDN.

        In general, SSL certificates don't include the trailing dot indicating a
        fully-qualified domain name, and thus, they don't validate properly when
        checked against a domain name that includes the dot. In addition, some
        servers may not expect to receive the trailing dot when provided.

        However, the hostname with trailing dot is critical to DNS resolution; doing a
        lookup with the trailing dot will properly only resolve the appropriate FQDN,
        whereas a lookup without a trailing dot will search the system's search domain
        list. Thus, it's important to keep the original host around for use only in
        those cases where it's appropriate (i.e., when doing DNS lookup to establish the
        actual TCP connection across which we're going to send HTTP requests).
        """
        return self._dns_host.rstrip(".")

    @host.setter
    def host(self, value: str) -> None:
        """
        Setter for the `host` property.

        We assume that only urllib3 uses the _dns_host attribute; httplib itself
        only uses `host`, and it seems reasonable that other libraries follow suit.
        """
        self._dns_host = value

    def _new_conn(self) -> socket.socket:
        """Establish a socket connection and set nodelay settings on it.

        :return: New socket connection.
        :raises NameResolutionError: if DNS resolution of the host fails.
        :raises ConnectTimeoutError: if the connection attempt times out.
        :raises NewConnectionError: for any other OS-level connection failure.
        """
        try:
            # Note: uses self._dns_host (which may keep a trailing dot),
            # not the normalized `host` property — see the property docstring.
            sock = connection.create_connection(
                (self._dns_host, self.port),
                self.timeout,
                source_address=self.source_address,
                socket_options=self.socket_options,
            )
        except socket.gaierror as e:
            raise NameResolutionError(self.host, self, e) from e
        except SocketTimeout as e:
            raise ConnectTimeoutError(
                self,
                f"Connection to {self.host} timed out. (connect timeout={self.timeout})",
            ) from e

        except OSError as e:
            raise NewConnectionError(
                self, f"Failed to establish a new connection: {e}"
            ) from e

        # Audit hooks are only available in Python 3.8+
        if _HAS_SYS_AUDIT:
            sys.audit("http.client.connect", self, self.host, self.port)

        return sock

    def set_tunnel(
        self,
        host: str,
        port: int | None = None,
        headers: typing.Mapping[str, str] | None = None,
        scheme: str = "http",
    ) -> None:
        # Validate the tunnel scheme before delegating to http.client,
        # which knows nothing about our `scheme` extension.
        if scheme not in ("http", "https"):
            raise ValueError(
                f"Invalid proxy scheme for tunneling: {scheme!r}, must be either 'http' or 'https'"
            )
        super().set_tunnel(host, port=port, headers=headers)
        self._tunnel_scheme = scheme

    def connect(self) -> None:
        """Open the TCP connection (and CONNECT tunnel, if one is configured)."""
        self.sock = self._new_conn()
        if self._tunnel_host:
            # If we're tunneling it means we're connected to our proxy.
            self._has_connected_to_proxy = True

            # TODO: Fix tunnel so it doesn't depend on self.sock state.
            self._tunnel()  # type: ignore[attr-defined]

        # If there's a proxy to be connected to we are fully connected.
        # This is set twice (once above and here) due to forwarding proxies
        # not using tunnelling.
        self._has_connected_to_proxy = bool(self.proxy)

        if self._has_connected_to_proxy:
            # Plain-HTTP connections can never verify a proxy's certificate.
            self.proxy_is_verified = False

    @property
    def is_closed(self) -> bool:
        return self.sock is None

    @property
    def is_connected(self) -> bool:
        if self.sock is None:
            return False
        # A readable socket here means EOF or unsolicited data,
        # i.e. the connection is no longer cleanly idle.
        return not wait_for_read(self.sock, timeout=0.0)

    @property
    def has_connected_to_proxy(self) -> bool:
        return self._has_connected_to_proxy

    @property
    def proxy_is_forwarding(self) -> bool:
        """
        Return True if a forwarding proxy is configured, else return False
        """
        return bool(self.proxy) and self._tunnel_host is None

    def close(self) -> None:
        try:
            super().close()
        finally:
            # Reset all stateful properties so connection
            # can be re-used without leaking prior configs.
            self.sock = None
            self.is_verified = False
            self.proxy_is_verified = None
            self._has_connected_to_proxy = False
            self._response_options = None
            self._tunnel_host = None
            self._tunnel_port = None
            self._tunnel_scheme = None

    def putrequest(
        self,
        method: str,
        url: str,
        skip_host: bool = False,
        skip_accept_encoding: bool = False,
    ) -> None:
        """"""
        # Empty docstring because the indentation of CPython's implementation
        # is broken but we don't want this method in our documentation.
        match = _CONTAINS_CONTROL_CHAR_RE.search(method)
        if match:
            raise ValueError(
                f"Method cannot contain non-token characters {method!r} (found at least {match.group()!r})"
            )

        return super().putrequest(
            method, url, skip_host=skip_host, skip_accept_encoding=skip_accept_encoding
        )

    def putheader(self, header: str, *values: str) -> None:  # type: ignore[override]
        """"""
        # SKIP_HEADER is a sentinel: when present among the values the header is
        # deliberately suppressed — but only headers urllib3 would otherwise set
        # automatically (SKIPPABLE_HEADERS) may be skipped.
        if not any(isinstance(v, str) and v == SKIP_HEADER for v in values):
            super().putheader(header, *values)
        elif to_str(header.lower()) not in SKIPPABLE_HEADERS:
            skippable_headers = "', '".join(
                [str.title(header) for header in sorted(SKIPPABLE_HEADERS)]
            )
            raise ValueError(
                f"urllib3.util.SKIP_HEADER only supports '{skippable_headers}'"
            )

    # `request` method's signature intentionally violates LSP.
    # urllib3's API is different from `http.client.HTTPConnection` and the subclassing is only incidental.
    def request(  # type: ignore[override]
        self,
        method: str,
        url: str,
        body: _TYPE_BODY | None = None,
        headers: typing.Mapping[str, str] | None = None,
        *,
        chunked: bool = False,
        preload_content: bool = True,
        decode_content: bool = True,
        enforce_content_length: bool = True,
    ) -> None:
        # Update the inner socket's timeout value to send the request.
        # This only triggers if the connection is re-used.
        if self.sock is not None:
            self.sock.settimeout(self.timeout)

        # Store these values to be fed into the HTTPResponse
        # object later. TODO: Remove this in favor of a real
        # HTTP lifecycle mechanism.

        # We have to store these before we call .request()
        # because sometimes we can still salvage a response
        # off the wire even if we aren't able to completely
        # send the request body.
        self._response_options = _ResponseOptions(
            request_method=method,
            request_url=url,
            preload_content=preload_content,
            decode_content=decode_content,
            enforce_content_length=enforce_content_length,
        )

        if headers is None:
            headers = {}
        header_keys = frozenset(to_str(k.lower()) for k in headers)
        skip_accept_encoding = "accept-encoding" in header_keys
        skip_host = "host" in header_keys
        self.putrequest(
            method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host
        )

        # Transform the body into an iterable of sendall()-able chunks
        # and detect if an explicit Content-Length is doable.
        chunks_and_cl = body_to_chunks(body, method=method, blocksize=self.blocksize)
        chunks = chunks_and_cl.chunks
        content_length = chunks_and_cl.content_length

        # When chunked is explicit set to 'True' we respect that.
        if chunked:
            if "transfer-encoding" not in header_keys:
                self.putheader("Transfer-Encoding", "chunked")
        else:
            # Detect whether a framing mechanism is already in use. If so
            # we respect that value, otherwise we pick chunked vs content-length
            # depending on the type of 'body'.
            if "content-length" in header_keys:
                chunked = False
            elif "transfer-encoding" in header_keys:
                chunked = True

            # Otherwise we go off the recommendation of 'body_to_chunks()'.
            else:
                chunked = False
                if content_length is None:
                    if chunks is not None:
                        # Body exists but its length is unknown: fall back to
                        # chunked transfer encoding.
                        chunked = True
                        self.putheader("Transfer-Encoding", "chunked")
                else:
                    self.putheader("Content-Length", str(content_length))

        # Now that framing headers are out of the way we send all the other headers.
        if "user-agent" not in header_keys:
            self.putheader("User-Agent", _get_default_user_agent())
        for header, value in headers.items():
            self.putheader(header, value)
        self.endheaders()

        # If we're given a body we start sending that in chunks.
        if chunks is not None:
            for chunk in chunks:
                # Sending empty chunks isn't allowed for TE: chunked
                # as it indicates the end of the body.
                if not chunk:
                    continue
                if isinstance(chunk, str):
                    chunk = chunk.encode("utf-8")
                if chunked:
                    # Chunked framing: hex length, CRLF, data, CRLF.
                    self.send(b"%x\r\n%b\r\n" % (len(chunk), chunk))
                else:
                    self.send(chunk)

        # Regardless of whether we have a body or not, if we're in
        # chunked mode we want to send an explicit empty chunk.
        if chunked:
            self.send(b"0\r\n\r\n")

    def request_chunked(
        self,
        method: str,
        url: str,
        body: _TYPE_BODY | None = None,
        headers: typing.Mapping[str, str] | None = None,
    ) -> None:
        """
        Alternative to the common request method, which sends the
        body with chunked encoding and not as one block
        """
        warnings.warn(
            "HTTPConnection.request_chunked() is deprecated and will be removed "
            "in urllib3 v2.1.0. Instead use HTTPConnection.request(..., chunked=True).",
            category=DeprecationWarning,
            stacklevel=2,
        )
        self.request(method, url, body=body, headers=headers, chunked=True)

    def getresponse(  # type: ignore[override]
        self,
    ) -> HTTPResponse:
        """
        Get the response from the server.

        If the HTTPConnection is in the correct state, returns an instance of
        HTTPResponse or of whatever object is returned by the response_class
        variable.

        If a request has not been sent or if a previous response has not be
        handled, ResponseNotReady is raised. If the HTTP response indicates that
        the connection should be closed, then it will be closed before the
        response is returned. When the connection is closed, the underlying
        socket is closed.
        """
        # Raise the same error as http.client.HTTPConnection
        if self._response_options is None:
            raise ResponseNotReady()

        # Reset this attribute for being used again.
        resp_options = self._response_options
        self._response_options = None

        # Since the connection's timeout value may have been updated
        # we need to set the timeout on the socket.
        self.sock.settimeout(self.timeout)

        # This is needed here to avoid circular import errors
        from .response import HTTPResponse

        # Get the response from http.client.HTTPConnection
        httplib_response = super().getresponse()

        try:
            assert_header_parsing(httplib_response.msg)
        except (HeaderParsingError, TypeError) as hpe:
            # Malformed headers are logged but not fatal; we still hand the
            # response back to the caller.
            log.warning(
                "Failed to parse headers (url=%s): %s",
                _url_from_connection(self, resp_options.request_url),
                hpe,
                exc_info=True,
            )

        headers = HTTPHeaderDict(httplib_response.msg.items())

        response = HTTPResponse(
            body=httplib_response,
            headers=headers,
            status=httplib_response.status,
            version=httplib_response.version,
            reason=httplib_response.reason,
            preload_content=resp_options.preload_content,
            decode_content=resp_options.decode_content,
            original_response=httplib_response,
            enforce_content_length=resp_options.enforce_content_length,
            request_method=resp_options.request_method,
            request_url=resp_options.request_url,
        )
        return response


class HTTPSConnection(HTTPConnection):
    """
    Many of the parameters to this constructor are passed to the underlying SSL
    socket by means of :py:func:`urllib3.util.ssl_wrap_socket`.
    """

    default_port = port_by_scheme["https"]  # type: ignore[misc]

    # TLS configuration attributes; set in __init__ (or the deprecated set_cert()).
    cert_reqs: int | str | None = None
    ca_certs: str | None = None
    ca_cert_dir: str | None = None
    ca_cert_data: None | str | bytes = None
    ssl_version: int | str | None = None
    ssl_minimum_version: int | None = None
    ssl_maximum_version: int | None = None
    assert_fingerprint: str | None = None

    def __init__(
        self,
        host: str,
        port: int | None = None,
        *,
        timeout: _TYPE_TIMEOUT = _DEFAULT_TIMEOUT,
        source_address: tuple[str, int] | None = None,
        blocksize: int = 16384,
        socket_options: None
        | (connection._TYPE_SOCKET_OPTIONS) = HTTPConnection.default_socket_options,
        proxy: Url | None = None,
        proxy_config: ProxyConfig | None = None,
        cert_reqs: int | str | None = None,
        assert_hostname: None | str | Literal[False] = None,
        assert_fingerprint: str | None = None,
        server_hostname: str | None = None,
        ssl_context: ssl.SSLContext | None = None,
        ca_certs: str | None = None,
        ca_cert_dir: str | None = None,
        ca_cert_data: None | str | bytes = None,
        ssl_minimum_version: int | None = None,
        ssl_maximum_version: int | None = None,
        ssl_version: int | str | None = None,  # Deprecated
        cert_file: str | None = None,
        key_file: str | None = None,
        key_password: str | None = None,
    ) -> None:
        super().__init__(
            host,
            port=port,
            timeout=timeout,
            source_address=source_address,
            blocksize=blocksize,
            socket_options=socket_options,
            proxy=proxy,
            proxy_config=proxy_config,
        )

        self.key_file = key_file
        self.cert_file = cert_file
        self.key_password = key_password
        self.ssl_context = ssl_context
        self.server_hostname = server_hostname
        self.assert_hostname = assert_hostname
        self.assert_fingerprint = assert_fingerprint
        self.ssl_version = ssl_version
        self.ssl_minimum_version = ssl_minimum_version
        self.ssl_maximum_version = ssl_maximum_version
        # Expand '~' in CA paths; falsy values pass through unchanged.
        self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
        self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
        self.ca_cert_data = ca_cert_data

        # cert_reqs depends on ssl_context so calculate last.
        if cert_reqs is None:
            if self.ssl_context is not None:
                cert_reqs = self.ssl_context.verify_mode
            else:
                cert_reqs = resolve_cert_reqs(None)
        self.cert_reqs = cert_reqs

    def set_cert(
        self,
        key_file: str | None = None,
        cert_file: str | None = None,
        cert_reqs: int | str | None = None,
        key_password: str | None = None,
        ca_certs: str | None = None,
        assert_hostname: None | str | Literal[False] = None,
        assert_fingerprint: str | None = None,
        ca_cert_dir: str | None = None,
        ca_cert_data: None | str | bytes = None,
    ) -> None:
        """
        This method should only be called once, before the connection is used.

        .. deprecated:: 2.0
            Pass these parameters to the :class:`HTTPSConnection` constructor instead.
        """
        warnings.warn(
            "HTTPSConnection.set_cert() is deprecated and will be removed "
            "in urllib3 v2.1.0. Instead provide the parameters to the "
            "HTTPSConnection constructor.",
            category=DeprecationWarning,
            stacklevel=2,
        )

        # If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also
        # have an SSLContext object in which case we'll use its verify_mode.
        if cert_reqs is None:
            if self.ssl_context is not None:
                cert_reqs = self.ssl_context.verify_mode
            else:
                cert_reqs = resolve_cert_reqs(None)

        self.key_file = key_file
        self.cert_file = cert_file
        self.cert_reqs = cert_reqs
        self.key_password = key_password
        self.assert_hostname = assert_hostname
        self.assert_fingerprint = assert_fingerprint
        # Expand '~' in CA paths; falsy values pass through unchanged.
        self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
        self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
        self.ca_cert_data = ca_cert_data

    def connect(self) -> None:
        """Open a TCP connection, tunnel through a proxy if configured, and
        wrap the socket in TLS, performing certificate verification."""
        sock: socket.socket | ssl.SSLSocket
        self.sock = sock = self._new_conn()
        server_hostname: str = self.host
        tls_in_tls = False

        # Do we need to establish a tunnel?
        if self._tunnel_host is not None:
            # We're tunneling to an HTTPS origin so need to do TLS-in-TLS.
            if self._tunnel_scheme == "https":
                # _connect_tls_proxy will verify and assign proxy_is_verified
                self.sock = sock = self._connect_tls_proxy(self.host, sock)
                tls_in_tls = True
            elif self._tunnel_scheme == "http":
                self.proxy_is_verified = False

            # If we're tunneling it means we're connected to our proxy.
            self._has_connected_to_proxy = True

            self._tunnel()  # type: ignore[attr-defined]
            # Override the host with the one we're requesting data from.
            server_hostname = self._tunnel_host

        if self.server_hostname is not None:
            server_hostname = self.server_hostname

        # A clock set before RECENT_DATE is almost certainly wrong and will
        # make certificate validity checks fail confusingly; warn up front.
        is_time_off = datetime.date.today() < RECENT_DATE
        if is_time_off:
            warnings.warn(
                (
                    f"System time is way off (before {RECENT_DATE}). This will probably "
                    "lead to SSL verification errors"
                ),
                SystemTimeWarning,
            )

        # Remove trailing '.' from fqdn hostnames to allow certificate validation
        server_hostname_rm_dot = server_hostname.rstrip(".")

        sock_and_verified = _ssl_wrap_socket_and_match_hostname(
            sock=sock,
            cert_reqs=self.cert_reqs,
            ssl_version=self.ssl_version,
            ssl_minimum_version=self.ssl_minimum_version,
            ssl_maximum_version=self.ssl_maximum_version,
            ca_certs=self.ca_certs,
            ca_cert_dir=self.ca_cert_dir,
            ca_cert_data=self.ca_cert_data,
            cert_file=self.cert_file,
            key_file=self.key_file,
            key_password=self.key_password,
            server_hostname=server_hostname_rm_dot,
            ssl_context=self.ssl_context,
            tls_in_tls=tls_in_tls,
            assert_hostname=self.assert_hostname,
            assert_fingerprint=self.assert_fingerprint,
        )
        self.sock = sock_and_verified.socket

        # Forwarding proxies can never have a verified target since
        # the proxy is the one doing the verification. Should instead
        # use a CONNECT tunnel in order to verify the target.
        # See: https://github.com/urllib3/urllib3/issues/3267.
        if self.proxy_is_forwarding:
            self.is_verified = False
        else:
            self.is_verified = sock_and_verified.is_verified

        # If there's a proxy to be connected to we are fully connected.
        # This is set twice (once above and here) due to forwarding proxies
        # not using tunnelling.
        self._has_connected_to_proxy = bool(self.proxy)

        # Set `self.proxy_is_verified` unless it's already set while
        # establishing a tunnel.
        if self._has_connected_to_proxy and self.proxy_is_verified is None:
            self.proxy_is_verified = sock_and_verified.is_verified

    def _connect_tls_proxy(self, hostname: str, sock: socket.socket) -> ssl.SSLSocket:
        """
        Establish a TLS connection to the proxy using the provided SSL context.
        """
        # `_connect_tls_proxy` is called when self._tunnel_host is truthy.
        proxy_config = typing.cast(ProxyConfig, self.proxy_config)
        ssl_context = proxy_config.ssl_context
        sock_and_verified = _ssl_wrap_socket_and_match_hostname(
            sock,
            cert_reqs=self.cert_reqs,
            ssl_version=self.ssl_version,
            ssl_minimum_version=self.ssl_minimum_version,
            ssl_maximum_version=self.ssl_maximum_version,
            ca_certs=self.ca_certs,
            ca_cert_dir=self.ca_cert_dir,
            ca_cert_data=self.ca_cert_data,
            server_hostname=hostname,
            ssl_context=ssl_context,
            assert_hostname=proxy_config.assert_hostname,
            assert_fingerprint=proxy_config.assert_fingerprint,
            # Features that aren't implemented for proxies yet:
            cert_file=None,
            key_file=None,
            key_password=None,
            tls_in_tls=False,
        )
        self.proxy_is_verified = sock_and_verified.is_verified
        return sock_and_verified.socket  # type: ignore[return-value]


class _WrappedAndVerifiedSocket(typing.NamedTuple):
    """
    Wrapped socket and whether the connection is
    verified after the TLS handshake
    """

    socket: ssl.SSLSocket | SSLTransport
    is_verified: bool


def _ssl_wrap_socket_and_match_hostname(
    sock: socket.socket,
    *,
    cert_reqs: None | str | int,
    ssl_version: None | str | int,
    ssl_minimum_version: int | None,
    ssl_maximum_version: int | None,
    cert_file: str | None,
    key_file: str | None,
    key_password: str | None,
    ca_certs: str | None,
    ca_cert_dir: str | None,
    ca_cert_data: None | str | bytes,
    assert_hostname: None | str | Literal[False],
    assert_fingerprint: str | None,
    server_hostname: str | None,
    ssl_context: ssl.SSLContext | None,
    tls_in_tls: bool = False,
) -> _WrappedAndVerifiedSocket:
    """Logic for constructing an SSLContext from all TLS parameters, passing
    that down into ssl_wrap_socket, and then doing certificate verification
    either via hostname or fingerprint. This function exists to guarantee
    that both proxies and targets have the same behavior when connecting via TLS.
    """
    default_ssl_context = False
    if ssl_context is None:
        default_ssl_context = True
        context = create_urllib3_context(
            ssl_version=resolve_ssl_version(ssl_version),
            ssl_minimum_version=ssl_minimum_version,
            ssl_maximum_version=ssl_maximum_version,
            cert_reqs=resolve_cert_reqs(cert_reqs),
        )
    else:
        context = ssl_context

    context.verify_mode = resolve_cert_reqs(cert_reqs)

    # In some cases, we want to verify hostnames ourselves
    if (
        # `ssl` can't verify fingerprints or alternate hostnames
        assert_fingerprint
        or assert_hostname
        # assert_hostname can be set to False to disable hostname checking
        or assert_hostname is False
        # We still support OpenSSL 1.0.2, which prevents us from verifying
        # hostnames easily: https://github.com/pyca/pyopenssl/pull/933
        or ssl_.IS_PYOPENSSL
        or not ssl_.HAS_NEVER_CHECK_COMMON_NAME
    ):
        context.check_hostname = False

    # Try to load OS default certs if none are given. We need to do the hasattr() check
    # for custom pyOpenSSL SSLContext objects because they don't support
    # load_default_certs().
    if (
        not ca_certs
        and not ca_cert_dir
        and not ca_cert_data
        and default_ssl_context
        and hasattr(context, "load_default_certs")
    ):
        context.load_default_certs()

    # Ensure that IPv6 addresses are in the proper format and don't have a
    # scope ID. Python's SSL module fails to recognize scoped IPv6 addresses
    # and interprets them as DNS hostnames.
    if server_hostname is not None:
        normalized = server_hostname.strip("[]")
        if "%" in normalized:
            normalized = normalized[: normalized.rfind("%")]
        if is_ipaddress(normalized):
            server_hostname = normalized

    ssl_sock = ssl_wrap_socket(
        sock=sock,
        keyfile=key_file,
        certfile=cert_file,
        key_password=key_password,
        ca_certs=ca_certs,
        ca_cert_dir=ca_cert_dir,
        ca_cert_data=ca_cert_data,
        server_hostname=server_hostname,
        ssl_context=context,
        tls_in_tls=tls_in_tls,
    )

    try:
        if assert_fingerprint:
            _assert_fingerprint(
                ssl_sock.getpeercert(binary_form=True), assert_fingerprint
            )
        elif (
            context.verify_mode != ssl.CERT_NONE
            and not context.check_hostname
            and assert_hostname is not False
        ):
            cert: _TYPE_PEER_CERT_RET_DICT = ssl_sock.getpeercert()  # type: ignore[assignment]

            # Need to signal to our match_hostname whether to use 'commonName' or not.
            # If we're using our own constructed SSLContext we explicitly set 'False'
            # because PyPy hard-codes 'True' from SSLContext.hostname_checks_common_name.
jpayne@7: if default_ssl_context: jpayne@7: hostname_checks_common_name = False jpayne@7: else: jpayne@7: hostname_checks_common_name = ( jpayne@7: getattr(context, "hostname_checks_common_name", False) or False jpayne@7: ) jpayne@7: jpayne@7: _match_hostname( jpayne@7: cert, jpayne@7: assert_hostname or server_hostname, # type: ignore[arg-type] jpayne@7: hostname_checks_common_name, jpayne@7: ) jpayne@7: jpayne@7: return _WrappedAndVerifiedSocket( jpayne@7: socket=ssl_sock, jpayne@7: is_verified=context.verify_mode == ssl.CERT_REQUIRED jpayne@7: or bool(assert_fingerprint), jpayne@7: ) jpayne@7: except BaseException: jpayne@7: ssl_sock.close() jpayne@7: raise jpayne@7: jpayne@7: jpayne@7: def _match_hostname( jpayne@7: cert: _TYPE_PEER_CERT_RET_DICT | None, jpayne@7: asserted_hostname: str, jpayne@7: hostname_checks_common_name: bool = False, jpayne@7: ) -> None: jpayne@7: # Our upstream implementation of ssl.match_hostname() jpayne@7: # only applies this normalization to IP addresses so it doesn't jpayne@7: # match DNS SANs so we do the same thing! jpayne@7: stripped_hostname = asserted_hostname.strip("[]") jpayne@7: if is_ipaddress(stripped_hostname): jpayne@7: asserted_hostname = stripped_hostname jpayne@7: jpayne@7: try: jpayne@7: match_hostname(cert, asserted_hostname, hostname_checks_common_name) jpayne@7: except CertificateError as e: jpayne@7: log.warning( jpayne@7: "Certificate did not match expected hostname: %s. 
Certificate: %s", jpayne@7: asserted_hostname, jpayne@7: cert, jpayne@7: ) jpayne@7: # Add cert to exception and reraise so client code can inspect jpayne@7: # the cert when catching the exception, if they want to jpayne@7: e._peer_cert = cert # type: ignore[attr-defined] jpayne@7: raise jpayne@7: jpayne@7: jpayne@7: def _wrap_proxy_error(err: Exception, proxy_scheme: str | None) -> ProxyError: jpayne@7: # Look for the phrase 'wrong version number', if found jpayne@7: # then we should warn the user that we're very sure that jpayne@7: # this proxy is HTTP-only and they have a configuration issue. jpayne@7: error_normalized = " ".join(re.split("[^a-z]", str(err).lower())) jpayne@7: is_likely_http_proxy = ( jpayne@7: "wrong version number" in error_normalized jpayne@7: or "unknown protocol" in error_normalized jpayne@7: or "record layer failure" in error_normalized jpayne@7: ) jpayne@7: http_proxy_warning = ( jpayne@7: ". Your proxy appears to only use HTTP and not HTTPS, " jpayne@7: "try changing your proxy URL to be HTTP. 
See: " jpayne@7: "https://urllib3.readthedocs.io/en/latest/advanced-usage.html" jpayne@7: "#https-proxy-error-http-proxy" jpayne@7: ) jpayne@7: new_err = ProxyError( jpayne@7: f"Unable to connect to proxy" jpayne@7: f"{http_proxy_warning if is_likely_http_proxy and proxy_scheme == 'https' else ''}", jpayne@7: err, jpayne@7: ) jpayne@7: new_err.__cause__ = err jpayne@7: return new_err jpayne@7: jpayne@7: jpayne@7: def _get_default_user_agent() -> str: jpayne@7: return f"python-urllib3/{__version__}" jpayne@7: jpayne@7: jpayne@7: class DummyConnection: jpayne@7: """Used to detect a failed ConnectionCls import.""" jpayne@7: jpayne@7: jpayne@7: if not ssl: jpayne@7: HTTPSConnection = DummyConnection # type: ignore[misc, assignment] # noqa: F811 jpayne@7: jpayne@7: jpayne@7: VerifiedHTTPSConnection = HTTPSConnection jpayne@7: jpayne@7: jpayne@7: def _url_from_connection( jpayne@7: conn: HTTPConnection | HTTPSConnection, path: str | None = None jpayne@7: ) -> str: jpayne@7: """Returns the URL from a given connection. This is mainly used for testing and logging.""" jpayne@7: jpayne@7: scheme = "https" if isinstance(conn, HTTPSConnection) else "http" jpayne@7: jpayne@7: return Url(scheme=scheme, host=conn.host, port=conn.port, path=path).url