jpayne@68: """ jpayne@68: requests.models jpayne@68: ~~~~~~~~~~~~~~~ jpayne@68: jpayne@68: This module contains the primary objects that power Requests. jpayne@68: """ jpayne@68: jpayne@68: import datetime jpayne@68: jpayne@68: # Import encoding now, to avoid implicit import later. jpayne@68: # Implicit import within threads may cause LookupError when standard library is in a ZIP, jpayne@68: # such as in Embedded Python. See https://github.com/psf/requests/issues/3578. jpayne@68: import encodings.idna # noqa: F401 jpayne@68: from io import UnsupportedOperation jpayne@68: jpayne@68: from urllib3.exceptions import ( jpayne@68: DecodeError, jpayne@68: LocationParseError, jpayne@68: ProtocolError, jpayne@68: ReadTimeoutError, jpayne@68: SSLError, jpayne@68: ) jpayne@68: from urllib3.fields import RequestField jpayne@68: from urllib3.filepost import encode_multipart_formdata jpayne@68: from urllib3.util import parse_url jpayne@68: jpayne@68: from ._internal_utils import to_native_string, unicode_is_ascii jpayne@68: from .auth import HTTPBasicAuth jpayne@68: from .compat import ( jpayne@68: Callable, jpayne@68: JSONDecodeError, jpayne@68: Mapping, jpayne@68: basestring, jpayne@68: builtin_str, jpayne@68: chardet, jpayne@68: cookielib, jpayne@68: ) jpayne@68: from .compat import json as complexjson jpayne@68: from .compat import urlencode, urlsplit, urlunparse jpayne@68: from .cookies import _copy_cookie_jar, cookiejar_from_dict, get_cookie_header jpayne@68: from .exceptions import ( jpayne@68: ChunkedEncodingError, jpayne@68: ConnectionError, jpayne@68: ContentDecodingError, jpayne@68: HTTPError, jpayne@68: InvalidJSONError, jpayne@68: InvalidURL, jpayne@68: ) jpayne@68: from .exceptions import JSONDecodeError as RequestsJSONDecodeError jpayne@68: from .exceptions import MissingSchema jpayne@68: from .exceptions import SSLError as RequestsSSLError jpayne@68: from .exceptions import StreamConsumedError jpayne@68: from .hooks import default_hooks jpayne@68: from .status_codes import codes jpayne@68: from .structures import CaseInsensitiveDict jpayne@68: from .utils import ( jpayne@68: check_header_validity, jpayne@68: get_auth_from_url, jpayne@68: guess_filename, jpayne@68: guess_json_utf, jpayne@68: iter_slices, jpayne@68: parse_header_links, jpayne@68: requote_uri, jpayne@68: stream_decode_response_unicode, jpayne@68: super_len, jpayne@68: to_key_val_list, jpayne@68: ) jpayne@68: jpayne@68: #: The set of HTTP status codes that indicate an automatically jpayne@68: #: processable redirect. jpayne@68: REDIRECT_STATI = ( jpayne@68: codes.moved, # 301 jpayne@68: codes.found, # 302 jpayne@68: codes.other, # 303 jpayne@68: codes.temporary_redirect, # 307 jpayne@68: codes.permanent_redirect, # 308 jpayne@68: ) jpayne@68: jpayne@68: DEFAULT_REDIRECT_LIMIT = 30 jpayne@68: CONTENT_CHUNK_SIZE = 10 * 1024 jpayne@68: ITER_CHUNK_SIZE = 512 jpayne@68: jpayne@68: jpayne@68: class RequestEncodingMixin: jpayne@68: @property jpayne@68: def path_url(self): jpayne@68: """Build the path URL to use.""" jpayne@68: jpayne@68: url = [] jpayne@68: jpayne@68: p = urlsplit(self.url) jpayne@68: jpayne@68: path = p.path jpayne@68: if not path: jpayne@68: path = "/" jpayne@68: jpayne@68: url.append(path) jpayne@68: jpayne@68: query = p.query jpayne@68: if query: jpayne@68: url.append("?") jpayne@68: url.append(query) jpayne@68: jpayne@68: return "".join(url) jpayne@68: jpayne@68: @staticmethod jpayne@68: def _encode_params(data): jpayne@68: """Encode parameters in a piece of data. 

    @staticmethod
    def _encode_params(data):
        """Encode parameters in a piece of data.

        Will successfully encode parameters when passed as a dict or a list of
        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
        if parameters are supplied as a dict.
        """

        if isinstance(data, (str, bytes)):
            return data
        elif hasattr(data, "read"):
            return data
        elif hasattr(data, "__iter__"):
            result = []
            for k, vs in to_key_val_list(data):
                if isinstance(vs, basestring) or not hasattr(vs, "__iter__"):
                    vs = [vs]
                for v in vs:
                    if v is not None:
                        result.append(
                            (
                                k.encode("utf-8") if isinstance(k, str) else k,
                                v.encode("utf-8") if isinstance(v, str) else v,
                            )
                        )
            return urlencode(result, doseq=True)
        else:
            return data
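
    # Illustrative sketch (not executed), assuming a plain dict payload:
    #   RequestEncodingMixin._encode_params({"key": "value", "list": ["a", "b"]})
    #   -> "key=value&list=a&list=b"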

    @staticmethod
    def _encode_files(files, data):
        """Build the body for a multipart/form-data request.

        Will successfully encode files when passed as a dict or a list of
        tuples. Order is retained if data is a list of tuples but arbitrary
        if parameters are supplied as a dict.
        The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, content_type)
        or 4-tuples (filename, fileobj, content_type, custom_headers).
        """
        if not files:
            raise ValueError("Files must be provided.")
        elif isinstance(data, basestring):
            raise ValueError("Data must not be a string.")

        new_fields = []
        fields = to_key_val_list(data or {})
        files = to_key_val_list(files or {})

        for field, val in fields:
            if isinstance(val, basestring) or not hasattr(val, "__iter__"):
                val = [val]
            for v in val:
                if v is not None:
                    # Don't call str() on bytestrings: in Py3 it all goes wrong.
                    if not isinstance(v, bytes):
                        v = str(v)

                    new_fields.append(
                        (
                            field.decode("utf-8")
                            if isinstance(field, bytes)
                            else field,
                            v.encode("utf-8") if isinstance(v, str) else v,
                        )
                    )

        for k, v in files:
            # support for explicit filename
            ft = None
            fh = None
            if isinstance(v, (tuple, list)):
                if len(v) == 2:
                    fn, fp = v
                elif len(v) == 3:
                    fn, fp, ft = v
                else:
                    fn, fp, ft, fh = v
            else:
                fn = guess_filename(v) or k
                fp = v

            if isinstance(fp, (str, bytes, bytearray)):
                fdata = fp
            elif hasattr(fp, "read"):
                fdata = fp.read()
            elif fp is None:
                continue
            else:
                fdata = fp

            rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
            rf.make_multipart(content_type=ft)
            new_fields.append(rf)

        body, content_type = encode_multipart_formdata(new_fields)

        return body, content_type
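
    # Illustrative sketch (not executed): ``files`` values may be bare file
    # objects or tuples in the forms described above, e.g.
    #   {"report": ("report.csv", open("report.csv", "rb"), "text/csv")}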


class RequestHooksMixin:
    def register_hook(self, event, hook):
        """Properly register a hook."""

        if event not in self.hooks:
            raise ValueError(f'Unsupported event specified, with event name "{event}"')

        if isinstance(hook, Callable):
            self.hooks[event].append(hook)
        elif hasattr(hook, "__iter__"):
            self.hooks[event].extend(h for h in hook if isinstance(h, Callable))

    def deregister_hook(self, event, hook):
        """Deregister a previously registered hook.
        Returns True if the hook existed, False if not.
        """

        try:
            self.hooks[event].remove(hook)
            return True
        except ValueError:
            return False
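

# Illustrative sketch (not executed): a "response" hook receives the Response
# (plus keyword arguments from the send step) and may return a replacement, e.g.
#   Request("GET", "https://httpbin.org/get",
#           hooks={"response": [lambda r, *args, **kwargs: r]})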


class Request(RequestHooksMixin):
    """A user-created :class:`Request <Request>` object.

    Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.

    :param method: HTTP method to use.
    :param url: URL to send.
    :param headers: dictionary of headers to send.
    :param files: dictionary of ``{filename: fileobject}`` files to multipart upload.
    :param data: the body to attach to the request. If a dictionary or
        list of tuples ``[(key, value)]`` is provided, form-encoding will
        take place.
    :param json: json for the body to attach to the request (if files or data is not specified).
    :param params: URL parameters to append to the URL. If a dictionary or
        list of tuples ``[(key, value)]`` is provided, form-encoding will
        take place.
    :param auth: Auth handler or (user, pass) tuple.
    :param cookies: dictionary or CookieJar of cookies to attach to this request.
    :param hooks: dictionary of callback hooks, for internal usage.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'https://httpbin.org/get')
      >>> req.prepare()
      <PreparedRequest [GET]>
    """

    def __init__(
        self,
        method=None,
        url=None,
        headers=None,
        files=None,
        data=None,
        params=None,
        auth=None,
        cookies=None,
        hooks=None,
        json=None,
    ):
        # Default empty dicts for dict params.
        data = [] if data is None else data
        files = [] if files is None else files
        headers = {} if headers is None else headers
        params = {} if params is None else params
        hooks = {} if hooks is None else hooks

        self.hooks = default_hooks()
        for k, v in list(hooks.items()):
            self.register_hook(event=k, hook=v)

        self.method = method
        self.url = url
        self.headers = headers
        self.files = files
        self.data = data
        self.json = json
        self.params = params
        self.auth = auth
        self.cookies = cookies

    def __repr__(self):
        return f"<Request [{self.method}]>"

    def prepare(self):
        """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
        p = PreparedRequest()
        p.prepare(
            method=self.method,
            url=self.url,
            headers=self.headers,
            files=self.files,
            data=self.data,
            json=self.json,
            params=self.params,
            auth=self.auth,
            cookies=self.cookies,
            hooks=self.hooks,
        )
        return p


class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
    """The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
    containing the exact bytes that will be sent to the server.

    Instances are generated from a :class:`Request <Request>` object, and
    should not be instantiated manually; doing so may produce undesirable
    effects.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'https://httpbin.org/get')
      >>> r = req.prepare()
      >>> r
      <PreparedRequest [GET]>

      >>> s = requests.Session()
      >>> s.send(r)
      <Response [200]>
    """

    def __init__(self):
        #: HTTP verb to send to the server.
        self.method = None
        #: HTTP URL to send the request to.
        self.url = None
        #: dictionary of HTTP headers.
        self.headers = None
        # The `CookieJar` used to create the Cookie header will be stored here
        # after prepare_cookies is called
        self._cookies = None
        #: request body to send to the server.
        self.body = None
        #: dictionary of callback hooks, for internal usage.
        self.hooks = default_hooks()
        #: integer denoting starting position of a readable file-like body.
        self._body_position = None

    def prepare(
        self,
        method=None,
        url=None,
        headers=None,
        files=None,
        data=None,
        params=None,
        auth=None,
        cookies=None,
        hooks=None,
        json=None,
    ):
        """Prepares the entire request with the given parameters."""

        self.prepare_method(method)
        self.prepare_url(url, params)
        self.prepare_headers(headers)
        self.prepare_cookies(cookies)
        self.prepare_body(data, files, json)
        self.prepare_auth(auth, url)

        # Note that prepare_auth must be last to enable authentication schemes
        # such as OAuth to work on a fully prepared request.

        # This MUST go after prepare_auth. Authenticators could add a hook
        self.prepare_hooks(hooks)

    def __repr__(self):
        return f"<PreparedRequest [{self.method}]>"

    def copy(self):
        p = PreparedRequest()
        p.method = self.method
        p.url = self.url
        p.headers = self.headers.copy() if self.headers is not None else None
        p._cookies = _copy_cookie_jar(self._cookies)
        p.body = self.body
        p.hooks = self.hooks
        p._body_position = self._body_position
        return p

    def prepare_method(self, method):
        """Prepares the given HTTP method."""
        self.method = method
        if self.method is not None:
            self.method = to_native_string(self.method.upper())

    @staticmethod
    def _get_idna_encoded_host(host):
        import idna

        try:
            host = idna.encode(host, uts46=True).decode("utf-8")
        except idna.IDNAError:
            raise UnicodeError
        return host

    def prepare_url(self, url, params):
        """Prepares the given HTTP URL."""
        #: Accept objects that have string representations.
        #: We're unable to blindly call unicode/str functions
        #: as this will include the bytestring indicator (b'')
        #: on python 3.x.
        #: https://github.com/psf/requests/pull/2238
        if isinstance(url, bytes):
            url = url.decode("utf8")
        else:
            url = str(url)

        # Remove leading whitespaces from url
        url = url.lstrip()

        # Don't do any URL preparation for non-HTTP schemes like `mailto`,
        # `data` etc to work around exceptions from `url_parse`, which
        # handles RFC 3986 only.
        if ":" in url and not url.lower().startswith("http"):
            self.url = url
            return

        # Support for unicode domain names and paths.
        try:
            scheme, auth, host, port, path, query, fragment = parse_url(url)
        except LocationParseError as e:
            raise InvalidURL(*e.args)

        if not scheme:
            raise MissingSchema(
                f"Invalid URL {url!r}: No scheme supplied. "
                f"Perhaps you meant https://{url}?"
            )

        if not host:
            raise InvalidURL(f"Invalid URL {url!r}: No host supplied")

        # In general, we want to try IDNA encoding the hostname if the string contains
        # non-ASCII characters. This allows users to automatically get the correct IDNA
        # behaviour. For strings containing only ASCII characters, we need to also verify
        # it doesn't start with a wildcard (*), before allowing the unencoded hostname.
        if not unicode_is_ascii(host):
            try:
                host = self._get_idna_encoded_host(host)
            except UnicodeError:
                raise InvalidURL("URL has an invalid label.")
        elif host.startswith(("*", ".")):
            raise InvalidURL("URL has an invalid label.")

        # Carefully reconstruct the network location
        netloc = auth or ""
        if netloc:
            netloc += "@"
        netloc += host
        if port:
            netloc += f":{port}"

        # Bare domains aren't valid URLs.
        if not path:
            path = "/"

        if isinstance(params, (str, bytes)):
            params = to_native_string(params)

        enc_params = self._encode_params(params)
        if enc_params:
            if query:
                query = f"{query}&{enc_params}"
            else:
                query = enc_params

        url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
        self.url = url
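
    # Illustrative sketch (not executed): prepare_url merges ``params`` into the
    # existing query string and requotes the result, e.g.
    #   p = PreparedRequest()
    #   p.prepare_url("https://example.com/path?a=1", {"b": "2"})
    #   p.url -> "https://example.com/path?a=1&b=2"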

    def prepare_headers(self, headers):
        """Prepares the given HTTP headers."""

        self.headers = CaseInsensitiveDict()
        if headers:
            for header in headers.items():
                # Raise exception on invalid header value.
                check_header_validity(header)
                name, value = header
                self.headers[to_native_string(name)] = value

    def prepare_body(self, data, files, json=None):
        """Prepares the given HTTP body data."""

        # Check if file, fo, generator, iterator.
        # If not, run through normal process.

        # Nottin' on you.
        body = None
        content_type = None

        if not data and json is not None:
            # urllib3 requires a bytes-like body. Python 2's json.dumps
            # provides this natively, but Python 3 gives a Unicode string.
            content_type = "application/json"

            try:
                body = complexjson.dumps(json, allow_nan=False)
            except ValueError as ve:
                raise InvalidJSONError(ve, request=self)

            if not isinstance(body, bytes):
                body = body.encode("utf-8")

        is_stream = all(
            [
                hasattr(data, "__iter__"),
                not isinstance(data, (basestring, list, tuple, Mapping)),
            ]
        )

        if is_stream:
            try:
                length = super_len(data)
            except (TypeError, AttributeError, UnsupportedOperation):
                length = None

            body = data

            if getattr(body, "tell", None) is not None:
                # Record the current file position before reading.
                # This will allow us to rewind a file in the event
                # of a redirect.
                try:
                    self._body_position = body.tell()
                except OSError:
                    # This differentiates from None, allowing us to catch
                    # a failed `tell()` later when trying to rewind the body
                    self._body_position = object()

            if files:
                raise NotImplementedError(
                    "Streamed bodies and files are mutually exclusive."
                )

            if length:
                self.headers["Content-Length"] = builtin_str(length)
            else:
                self.headers["Transfer-Encoding"] = "chunked"
        else:
            # Multi-part file uploads.
            if files:
                (body, content_type) = self._encode_files(files, data)
            else:
                if data:
                    body = self._encode_params(data)
                    if isinstance(data, basestring) or hasattr(data, "read"):
                        content_type = None
                    else:
                        content_type = "application/x-www-form-urlencoded"

            self.prepare_content_length(body)

            # Add content-type if it wasn't explicitly provided.
            if content_type and ("content-type" not in self.headers):
                self.headers["Content-Type"] = content_type

        self.body = body
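
    # Illustrative sketch (not executed): ``data={"k": "v"}`` yields a urlencoded
    # body with Content-Type application/x-www-form-urlencoded, ``json={"k": "v"}``
    # a UTF-8 encoded JSON body with application/json, and a generator body is
    # sent with Transfer-Encoding: chunked when its length cannot be determined.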
jpayne@68: """ jpayne@68: if isinstance(cookies, cookielib.CookieJar): jpayne@68: self._cookies = cookies jpayne@68: else: jpayne@68: self._cookies = cookiejar_from_dict(cookies) jpayne@68: jpayne@68: cookie_header = get_cookie_header(self._cookies, self) jpayne@68: if cookie_header is not None: jpayne@68: self.headers["Cookie"] = cookie_header jpayne@68: jpayne@68: def prepare_hooks(self, hooks): jpayne@68: """Prepares the given hooks.""" jpayne@68: # hooks can be passed as None to the prepare method and to this jpayne@68: # method. To prevent iterating over None, simply use an empty list jpayne@68: # if hooks is False-y jpayne@68: hooks = hooks or [] jpayne@68: for event in hooks: jpayne@68: self.register_hook(event, hooks[event]) jpayne@68: jpayne@68: jpayne@68: class Response: jpayne@68: """The :class:`Response ` object, which contains a jpayne@68: server's response to an HTTP request. jpayne@68: """ jpayne@68: jpayne@68: __attrs__ = [ jpayne@68: "_content", jpayne@68: "status_code", jpayne@68: "headers", jpayne@68: "url", jpayne@68: "history", jpayne@68: "encoding", jpayne@68: "reason", jpayne@68: "cookies", jpayne@68: "elapsed", jpayne@68: "request", jpayne@68: ] jpayne@68: jpayne@68: def __init__(self): jpayne@68: self._content = False jpayne@68: self._content_consumed = False jpayne@68: self._next = None jpayne@68: jpayne@68: #: Integer Code of responded HTTP Status, e.g. 404 or 200. jpayne@68: self.status_code = None jpayne@68: jpayne@68: #: Case-insensitive Dictionary of Response Headers. jpayne@68: #: For example, ``headers['content-encoding']`` will return the jpayne@68: #: value of a ``'Content-Encoding'`` response header. jpayne@68: self.headers = CaseInsensitiveDict() jpayne@68: jpayne@68: #: File-like object representation of response (for advanced usage). jpayne@68: #: Use of ``raw`` requires that ``stream=True`` be set on the request. jpayne@68: #: This requirement does not apply for use internally to Requests. jpayne@68: self.raw = None jpayne@68: jpayne@68: #: Final URL location of Response. jpayne@68: self.url = None jpayne@68: jpayne@68: #: Encoding to decode with when accessing r.text. jpayne@68: self.encoding = None jpayne@68: jpayne@68: #: A list of :class:`Response ` objects from jpayne@68: #: the history of the Request. Any redirect responses will end jpayne@68: #: up here. The list is sorted from the oldest to the most recent request. jpayne@68: self.history = [] jpayne@68: jpayne@68: #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK". jpayne@68: self.reason = None jpayne@68: jpayne@68: #: A CookieJar of Cookies the server sent back. jpayne@68: self.cookies = cookiejar_from_dict({}) jpayne@68: jpayne@68: #: The amount of time elapsed between sending the request jpayne@68: #: and the arrival of the response (as a timedelta). jpayne@68: #: This property specifically measures the time taken between sending jpayne@68: #: the first byte of the request and finishing parsing the headers. It jpayne@68: #: is therefore unaffected by consuming the response content or the jpayne@68: #: value of the ``stream`` keyword argument. jpayne@68: self.elapsed = datetime.timedelta(0) jpayne@68: jpayne@68: #: The :class:`PreparedRequest ` object to which this jpayne@68: #: is a response. 

    def prepare_cookies(self, cookies):
        """Prepares the given HTTP cookie data.

        This function eventually generates a ``Cookie`` header from the
        given cookies using cookielib. Due to cookielib's design, the header
        will not be regenerated if it already exists, meaning this function
        can only be called once for the life of the
        :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
        to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
        header is removed beforehand.
        """
        if isinstance(cookies, cookielib.CookieJar):
            self._cookies = cookies
        else:
            self._cookies = cookiejar_from_dict(cookies)

        cookie_header = get_cookie_header(self._cookies, self)
        if cookie_header is not None:
            self.headers["Cookie"] = cookie_header

    def prepare_hooks(self, hooks):
        """Prepares the given hooks."""
        # hooks can be passed as None to the prepare method and to this
        # method. To prevent iterating over None, simply use an empty list
        # if hooks is False-y
        hooks = hooks or []
        for event in hooks:
            self.register_hook(event, hooks[event])


class Response:
    """The :class:`Response <Response>` object, which contains a
    server's response to an HTTP request.
    """

    __attrs__ = [
        "_content",
        "status_code",
        "headers",
        "url",
        "history",
        "encoding",
        "reason",
        "cookies",
        "elapsed",
        "request",
    ]

    def __init__(self):
        self._content = False
        self._content_consumed = False
        self._next = None

        #: Integer Code of responded HTTP Status, e.g. 404 or 200.
        self.status_code = None

        #: Case-insensitive Dictionary of Response Headers.
        #: For example, ``headers['content-encoding']`` will return the
        #: value of a ``'Content-Encoding'`` response header.
        self.headers = CaseInsensitiveDict()

        #: File-like object representation of response (for advanced usage).
        #: Use of ``raw`` requires that ``stream=True`` be set on the request.
        #: This requirement does not apply for use internally to Requests.
        self.raw = None

        #: Final URL location of Response.
        self.url = None

        #: Encoding to decode with when accessing r.text.
        self.encoding = None

        #: A list of :class:`Response <Response>` objects from
        #: the history of the Request. Any redirect responses will end
        #: up here. The list is sorted from the oldest to the most recent request.
        self.history = []

        #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
        self.reason = None

        #: A CookieJar of Cookies the server sent back.
        self.cookies = cookiejar_from_dict({})

        #: The amount of time elapsed between sending the request
        #: and the arrival of the response (as a timedelta).
        #: This property specifically measures the time taken between sending
        #: the first byte of the request and finishing parsing the headers. It
        #: is therefore unaffected by consuming the response content or the
        #: value of the ``stream`` keyword argument.
        self.elapsed = datetime.timedelta(0)

        #: The :class:`PreparedRequest <PreparedRequest>` object to which this
        #: is a response.
        self.request = None

    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    def __getstate__(self):
        # Consume everything; accessing the content attribute makes
        # sure the content has been fully read.
        if not self._content_consumed:
            self.content

        return {attr: getattr(self, attr, None) for attr in self.__attrs__}

    def __setstate__(self, state):
        for name, value in state.items():
            setattr(self, name, value)

        # pickled objects do not have .raw
        setattr(self, "_content_consumed", True)
        setattr(self, "raw", None)

    def __repr__(self):
        return f"<Response [{self.status_code}]>"

    def __bool__(self):
        """Returns True if :attr:`status_code` is less than 400.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        """
        return self.ok

    def __nonzero__(self):
        """Returns True if :attr:`status_code` is less than 400.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        """
        return self.ok

    def __iter__(self):
        """Allows you to use a response as an iterator."""
        return self.iter_content(128)

    @property
    def ok(self):
        """Returns True if :attr:`status_code` is less than 400, False if not.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        """
        try:
            self.raise_for_status()
        except HTTPError:
            return False
        return True
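
    # Illustrative sketch (not executed): truth-testing a Response is equivalent
    # to checking ``ok``, e.g.
    #   if r:       # True for any status code below 400
    #       ...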
jpayne@68: """ jpayne@68: return "location" in self.headers and self.status_code in REDIRECT_STATI jpayne@68: jpayne@68: @property jpayne@68: def is_permanent_redirect(self): jpayne@68: """True if this Response one of the permanent versions of redirect.""" jpayne@68: return "location" in self.headers and self.status_code in ( jpayne@68: codes.moved_permanently, jpayne@68: codes.permanent_redirect, jpayne@68: ) jpayne@68: jpayne@68: @property jpayne@68: def next(self): jpayne@68: """Returns a PreparedRequest for the next request in a redirect chain, if there is one.""" jpayne@68: return self._next jpayne@68: jpayne@68: @property jpayne@68: def apparent_encoding(self): jpayne@68: """The apparent encoding, provided by the charset_normalizer or chardet libraries.""" jpayne@68: if chardet is not None: jpayne@68: return chardet.detect(self.content)["encoding"] jpayne@68: else: jpayne@68: # If no character detection library is available, we'll fall back jpayne@68: # to a standard Python utf-8 str. jpayne@68: return "utf-8" jpayne@68: jpayne@68: def iter_content(self, chunk_size=1, decode_unicode=False): jpayne@68: """Iterates over the response data. When stream=True is set on the jpayne@68: request, this avoids reading the content at once into memory for jpayne@68: large responses. The chunk size is the number of bytes it should jpayne@68: read into memory. This is not necessarily the length of each item jpayne@68: returned as decoding can take place. jpayne@68: jpayne@68: chunk_size must be of type int or None. A value of None will jpayne@68: function differently depending on the value of `stream`. jpayne@68: stream=True will read data as it arrives in whatever size the jpayne@68: chunks are received. If stream=False, data is returned as jpayne@68: a single chunk. jpayne@68: jpayne@68: If decode_unicode is True, content will be decoded using the best jpayne@68: available encoding based on the response. jpayne@68: """ jpayne@68: jpayne@68: def generate(): jpayne@68: # Special case for urllib3. jpayne@68: if hasattr(self.raw, "stream"): jpayne@68: try: jpayne@68: yield from self.raw.stream(chunk_size, decode_content=True) jpayne@68: except ProtocolError as e: jpayne@68: raise ChunkedEncodingError(e) jpayne@68: except DecodeError as e: jpayne@68: raise ContentDecodingError(e) jpayne@68: except ReadTimeoutError as e: jpayne@68: raise ConnectionError(e) jpayne@68: except SSLError as e: jpayne@68: raise RequestsSSLError(e) jpayne@68: else: jpayne@68: # Standard file-like object. jpayne@68: while True: jpayne@68: chunk = self.raw.read(chunk_size) jpayne@68: if not chunk: jpayne@68: break jpayne@68: yield chunk jpayne@68: jpayne@68: self._content_consumed = True jpayne@68: jpayne@68: if self._content_consumed and isinstance(self._content, bool): jpayne@68: raise StreamConsumedError() jpayne@68: elif chunk_size is not None and not isinstance(chunk_size, int): jpayne@68: raise TypeError( jpayne@68: f"chunk_size must be an int, it is instead a {type(chunk_size)}." 

    def iter_lines(
        self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None
    ):
        """Iterates over the response data, one line at a time. When
        stream=True is set on the request, this avoids reading the
        content at once into memory for large responses.

        .. note:: This method is not reentrant safe.
        """

        pending = None

        for chunk in self.iter_content(
            chunk_size=chunk_size, decode_unicode=decode_unicode
        ):
            if pending is not None:
                chunk = pending + chunk

            if delimiter:
                lines = chunk.split(delimiter)
            else:
                lines = chunk.splitlines()

            if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
                pending = lines.pop()
            else:
                pending = None

            yield from lines

        if pending is not None:
            yield pending

    @property
    def content(self):
        """Content of the response, in bytes."""

        if self._content is False:
            # Read the contents.
            if self._content_consumed:
                raise RuntimeError("The content for this response was already consumed")

            if self.status_code == 0 or self.raw is None:
                self._content = None
            else:
                self._content = b"".join(self.iter_content(CONTENT_CHUNK_SIZE)) or b""

        self._content_consumed = True
        # don't need to release the connection; that's been handled by urllib3
        # since we exhausted the data.
        return self._content

    @property
    def text(self):
        """Content of the response, in unicode.

        If Response.encoding is None, encoding will be guessed using
        ``charset_normalizer`` or ``chardet``.

        The encoding of the response content is determined based solely on HTTP
        headers, following RFC 2616 to the letter. If you can take advantage of
        non-HTTP knowledge to make a better guess at the encoding, you should
        set ``r.encoding`` appropriately before accessing this property.
        """

        # Try charset from content-type
        content = None
        encoding = self.encoding

        if not self.content:
            return ""

        # Fallback to auto-detected encoding.
        if self.encoding is None:
            encoding = self.apparent_encoding

        # Decode unicode from given encoding.
        try:
            content = str(self.content, encoding, errors="replace")
        except (LookupError, TypeError):
            # A LookupError is raised if the encoding was not found which could
            # indicate a misspelling or similar mistake.
            #
            # A TypeError can be raised if encoding is None
            #
            # So we try blindly encoding.
            content = str(self.content, errors="replace")

        return content
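
    # Illustrative sketch (not executed): ``content`` is the raw byte payload,
    # while ``text`` decodes it using ``encoding`` (or the apparent encoding), so
    # setting ``r.encoding = "utf-8"`` before reading ``r.text`` overrides the
    # header-based or detected choice.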

    def json(self, **kwargs):
        r"""Returns the json-encoded content of a response, if any.

        :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
        :raises requests.exceptions.JSONDecodeError: If the response body does not
            contain valid json.
        """

        if not self.encoding and self.content and len(self.content) > 3:
            # No encoding set. JSON RFC 4627 section 3 states we should expect
            # UTF-8, -16 or -32. Detect which one to use; If the detection or
            # decoding fails, fall back to `self.text` (using charset_normalizer to make
            # a best guess).
            encoding = guess_json_utf(self.content)
            if encoding is not None:
                try:
                    return complexjson.loads(self.content.decode(encoding), **kwargs)
                except UnicodeDecodeError:
                    # Wrong UTF codec detected; usually because it's not UTF-8
                    # but some other 8-bit codec. This is an RFC violation,
                    # and the server didn't bother to tell us what codec *was*
                    # used.
                    pass
                except JSONDecodeError as e:
                    raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)

        try:
            return complexjson.loads(self.text, **kwargs)
        except JSONDecodeError as e:
            # Catch JSON-related errors and raise as requests.JSONDecodeError
            # This aliases json.JSONDecodeError and simplejson.JSONDecodeError
            raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)

    @property
    def links(self):
        """Returns the parsed header links of the response, if any."""

        header = self.headers.get("link")

        resolved_links = {}

        if header:
            links = parse_header_links(header)

            for link in links:
                key = link.get("rel") or link.get("url")
                resolved_links[key] = link

        return resolved_links
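
    # Illustrative sketch (not executed): for a paginated API that sends a
    # ``Link`` header, the next page URL can be read as
    #   r.links["next"]["url"]
    # (assuming the server included a rel="next" link).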

    def raise_for_status(self):
        """Raises :class:`HTTPError`, if one occurred."""

        http_error_msg = ""
        if isinstance(self.reason, bytes):
            # We attempt to decode utf-8 first because some servers
            # choose to localize their reason strings. If the string
            # isn't utf-8, we fall back to iso-8859-1 for all other
            # encodings. (See PR #3538)
            try:
                reason = self.reason.decode("utf-8")
            except UnicodeDecodeError:
                reason = self.reason.decode("iso-8859-1")
        else:
            reason = self.reason

        if 400 <= self.status_code < 500:
            http_error_msg = (
                f"{self.status_code} Client Error: {reason} for url: {self.url}"
            )

        elif 500 <= self.status_code < 600:
            http_error_msg = (
                f"{self.status_code} Server Error: {reason} for url: {self.url}"
            )

        if http_error_msg:
            raise HTTPError(http_error_msg, response=self)

    def close(self):
        """Releases the connection back to the pool. Once this method has been
        called the underlying ``raw`` object must not be accessed again.

        *Note: Should not normally need to be called explicitly.*
        """
        if not self._content_consumed:
            self.raw.close()

        release_conn = getattr(self.raw, "release_conn", None)
        if release_conn is not None:
            release_conn()