"""
requests.models
~~~~~~~~~~~~~~~

This module contains the primary objects that power Requests.
"""

import datetime

# Import encoding now, to avoid implicit import later.
# Implicit import within threads may cause LookupError when standard library is in a ZIP,
# such as in Embedded Python. See https://github.com/psf/requests/issues/3578.
import encodings.idna  # noqa: F401
from io import UnsupportedOperation

from urllib3.exceptions import (
    DecodeError,
    LocationParseError,
    ProtocolError,
    ReadTimeoutError,
    SSLError,
)
from urllib3.fields import RequestField
from urllib3.filepost import encode_multipart_formdata
from urllib3.util import parse_url

from ._internal_utils import to_native_string, unicode_is_ascii
from .auth import HTTPBasicAuth
from .compat import (
    Callable,
    JSONDecodeError,
    Mapping,
    basestring,
    builtin_str,
    chardet,
    cookielib,
)
from .compat import json as complexjson
from .compat import urlencode, urlsplit, urlunparse
from .cookies import _copy_cookie_jar, cookiejar_from_dict, get_cookie_header
from .exceptions import (
    ChunkedEncodingError,
    ConnectionError,
    ContentDecodingError,
    HTTPError,
    InvalidJSONError,
    InvalidURL,
)
from .exceptions import JSONDecodeError as RequestsJSONDecodeError
from .exceptions import MissingSchema
from .exceptions import SSLError as RequestsSSLError
from .exceptions import StreamConsumedError
from .hooks import default_hooks
from .status_codes import codes
from .structures import CaseInsensitiveDict
from .utils import (
    check_header_validity,
    get_auth_from_url,
    guess_filename,
    guess_json_utf,
    iter_slices,
    parse_header_links,
    requote_uri,
    stream_decode_response_unicode,
    super_len,
    to_key_val_list,
)

#: The set of HTTP status codes that indicate an automatically
#: processable redirect.
REDIRECT_STATI = (
    codes.moved,  # 301
    codes.found,  # 302
    codes.other,  # 303
    codes.temporary_redirect,  # 307
    codes.permanent_redirect,  # 308
)

DEFAULT_REDIRECT_LIMIT = 30
CONTENT_CHUNK_SIZE = 10 * 1024
ITER_CHUNK_SIZE = 512


class RequestEncodingMixin:
    @property
    def path_url(self):
        """Build the path URL to use."""

        url = []

        p = urlsplit(self.url)

        path = p.path
        if not path:
            path = "/"

        url.append(path)

        query = p.query
        if query:
            url.append("?")
            url.append(query)

        return "".join(url)
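
    # Illustrative sketch (hypothetical values, not part of the original
    # module): _encode_params turns form-style data into a query string, e.g.
    #   _encode_params([("tag", "python"), ("tag", "http")]) -> "tag=python&tag=http"
    #   _encode_params({"q": "requests"})                    -> "q=requests"
    # Strings, bytes and file-like objects are returned unchanged.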
    @staticmethod
    def _encode_params(data):
        """Encode parameters in a piece of data.

        Will successfully encode parameters when passed as a dict or a list of
        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
        if parameters are supplied as a dict.
        """

        if isinstance(data, (str, bytes)):
            return data
        elif hasattr(data, "read"):
            return data
        elif hasattr(data, "__iter__"):
            result = []
            for k, vs in to_key_val_list(data):
                if isinstance(vs, basestring) or not hasattr(vs, "__iter__"):
                    vs = [vs]
                for v in vs:
                    if v is not None:
                        result.append(
                            (
                                k.encode("utf-8") if isinstance(k, str) else k,
                                v.encode("utf-8") if isinstance(v, str) else v,
                            )
                        )
            return urlencode(result, doseq=True)
        else:
            return data

    @staticmethod
    def _encode_files(files, data):
        """Build the body for a multipart/form-data request.

        Will successfully encode files when passed as a dict or a list of
        tuples. Order is retained if data is a list of tuples but arbitrary
        if parameters are supplied as a dict.
        The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, content_type)
        or 4-tuples (filename, fileobj, content_type, custom_headers).
        """
        if not files:
            raise ValueError("Files must be provided.")
        elif isinstance(data, basestring):
            raise ValueError("Data must not be a string.")

        new_fields = []
        fields = to_key_val_list(data or {})
        files = to_key_val_list(files or {})

        for field, val in fields:
            if isinstance(val, basestring) or not hasattr(val, "__iter__"):
                val = [val]
            for v in val:
                if v is not None:
                    # Don't call str() on bytestrings: in Py3 it all goes wrong.
                    if not isinstance(v, bytes):
                        v = str(v)

                    new_fields.append(
                        (
                            field.decode("utf-8")
                            if isinstance(field, bytes)
                            else field,
                            v.encode("utf-8") if isinstance(v, str) else v,
                        )
                    )

        for k, v in files:
            # support for explicit filename
            ft = None
            fh = None
            if isinstance(v, (tuple, list)):
                if len(v) == 2:
                    fn, fp = v
                elif len(v) == 3:
                    fn, fp, ft = v
                else:
                    fn, fp, ft, fh = v
            else:
                fn = guess_filename(v) or k
                fp = v

            if isinstance(fp, (str, bytes, bytearray)):
                fdata = fp
            elif hasattr(fp, "read"):
                fdata = fp.read()
            elif fp is None:
                continue
            else:
                fdata = fp

            rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
            rf.make_multipart(content_type=ft)
            new_fields.append(rf)

        body, content_type = encode_multipart_formdata(new_fields)

        return body, content_type


class RequestHooksMixin:
    def register_hook(self, event, hook):
        """Properly register a hook."""

        if event not in self.hooks:
            raise ValueError(f'Unsupported event specified, with event name "{event}"')

        if isinstance(hook, Callable):
            self.hooks[event].append(hook)
        elif hasattr(hook, "__iter__"):
            self.hooks[event].extend(h for h in hook if isinstance(h, Callable))

    def deregister_hook(self, event, hook):
        """Deregister a previously registered hook.
        Returns True if the hook existed, False if not.
        """

        try:
            self.hooks[event].remove(hook)
            return True
        except ValueError:
            return False


class Request(RequestHooksMixin):
    """A user-created :class:`Request <Request>` object.

    Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.

    :param method: HTTP method to use.
    :param url: URL to send.
    :param headers: dictionary of headers to send.
    :param files: dictionary of {filename: fileobject} files to multipart upload.
    :param data: the body to attach to the request. If a dictionary or
        list of tuples ``[(key, value)]`` is provided, form-encoding will
        take place.
    :param json: json for the body to attach to the request (if files or data is not specified).
    :param params: URL parameters to append to the URL. If a dictionary or
        list of tuples ``[(key, value)]`` is provided, form-encoding will
        take place.
    :param auth: Auth handler or (user, pass) tuple.
    :param cookies: dictionary or CookieJar of cookies to attach to this request.
    :param hooks: dictionary of callback hooks, for internal usage.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'https://httpbin.org/get')
      >>> req.prepare()
      <PreparedRequest [GET]>
    """

    def __init__(
        self,
        method=None,
        url=None,
        headers=None,
        files=None,
        data=None,
        params=None,
        auth=None,
        cookies=None,
        hooks=None,
        json=None,
    ):
        # Default empty dicts for dict params.
        data = [] if data is None else data
        files = [] if files is None else files
        headers = {} if headers is None else headers
        params = {} if params is None else params
        hooks = {} if hooks is None else hooks

        self.hooks = default_hooks()
        for k, v in list(hooks.items()):
            self.register_hook(event=k, hook=v)

        self.method = method
        self.url = url
        self.headers = headers
        self.files = files
        self.data = data
        self.json = json
        self.params = params
        self.auth = auth
        self.cookies = cookies

    def __repr__(self):
        return f"<Request [{self.method}]>"

    def prepare(self):
        """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
        p = PreparedRequest()
        p.prepare(
            method=self.method,
            url=self.url,
            headers=self.headers,
            files=self.files,
            data=self.data,
            json=self.json,
            params=self.params,
            auth=self.auth,
            cookies=self.cookies,
            hooks=self.hooks,
        )
        return p


class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
    """The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
    containing the exact bytes that will be sent to the server.

    Instances are generated from a :class:`Request <Request>` object, and
    should not be instantiated manually; doing so may produce undesirable
    effects.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'https://httpbin.org/get')
      >>> r = req.prepare()
      >>> r
      <PreparedRequest [GET]>

      >>> s = requests.Session()
      >>> s.send(r)
      <Response [200]>
    """

    def __init__(self):
        #: HTTP verb to send to the server.
        self.method = None
        #: HTTP URL to send the request to.
        self.url = None
        #: dictionary of HTTP headers.
        self.headers = None
        # The `CookieJar` used to create the Cookie header will be stored here
        # after prepare_cookies is called
        self._cookies = None
        #: request body to send to the server.
        self.body = None
        #: dictionary of callback hooks, for internal usage.
        self.hooks = default_hooks()
        #: integer denoting starting position of a readable file-like body.
        self._body_position = None

    def prepare(
        self,
        method=None,
        url=None,
        headers=None,
        files=None,
        data=None,
        params=None,
        auth=None,
        cookies=None,
        hooks=None,
        json=None,
    ):
        """Prepares the entire request with the given parameters."""

        self.prepare_method(method)
        self.prepare_url(url, params)
        self.prepare_headers(headers)
        self.prepare_cookies(cookies)
        self.prepare_body(data, files, json)
        self.prepare_auth(auth, url)

        # Note that prepare_auth must be last to enable authentication schemes
        # such as OAuth to work on a fully prepared request.

        # This MUST go after prepare_auth. Authenticators could add a hook
        self.prepare_hooks(hooks)

    def __repr__(self):
        return f"<PreparedRequest [{self.method}]>"

    def copy(self):
        p = PreparedRequest()
        p.method = self.method
        p.url = self.url
        p.headers = self.headers.copy() if self.headers is not None else None
        p._cookies = _copy_cookie_jar(self._cookies)
        p.body = self.body
        p.hooks = self.hooks
        p._body_position = self._body_position
        return p

    def prepare_method(self, method):
        """Prepares the given HTTP method."""
        self.method = method
        if self.method is not None:
            self.method = to_native_string(self.method.upper())

    @staticmethod
    def _get_idna_encoded_host(host):
        import idna

        try:
            host = idna.encode(host, uts46=True).decode("utf-8")
        except idna.IDNAError:
            raise UnicodeError
        return host

    def prepare_url(self, url, params):
        """Prepares the given HTTP URL."""
        #: Accept objects that have string representations.
        #: We're unable to blindly call unicode/str functions
        #: as this will include the bytestring indicator (b'')
        #: on python 3.x.
        #: https://github.com/psf/requests/pull/2238
        if isinstance(url, bytes):
            url = url.decode("utf8")
        else:
            url = str(url)

        # Remove leading whitespaces from url
        url = url.lstrip()

        # Don't do any URL preparation for non-HTTP schemes like `mailto`,
        # `data` etc to work around exceptions from `url_parse`, which
        # handles RFC 3986 only.
        if ":" in url and not url.lower().startswith("http"):
            self.url = url
            return

        # Support for unicode domain names and paths.
        try:
            scheme, auth, host, port, path, query, fragment = parse_url(url)
        except LocationParseError as e:
            raise InvalidURL(*e.args)

        if not scheme:
            raise MissingSchema(
                f"Invalid URL {url!r}: No scheme supplied. "
                f"Perhaps you meant https://{url}?"
            )

        if not host:
            raise InvalidURL(f"Invalid URL {url!r}: No host supplied")

        # In general, we want to try IDNA encoding the hostname if the string contains
        # non-ASCII characters. This allows users to automatically get the correct IDNA
        # behaviour. For strings containing only ASCII characters, we need to also verify
        # it doesn't start with a wildcard (*), before allowing the unencoded hostname.
        if not unicode_is_ascii(host):
            try:
                host = self._get_idna_encoded_host(host)
            except UnicodeError:
                raise InvalidURL("URL has an invalid label.")
        elif host.startswith(("*", ".")):
            raise InvalidURL("URL has an invalid label.")

        # Carefully reconstruct the network location
        netloc = auth or ""
        if netloc:
            netloc += "@"
        netloc += host
        if port:
            netloc += f":{port}"

        # Bare domains aren't valid URLs.
        if not path:
            path = "/"

        if isinstance(params, (str, bytes)):
            params = to_native_string(params)

        enc_params = self._encode_params(params)
        if enc_params:
            if query:
                query = f"{query}&{enc_params}"
            else:
                query = enc_params

        url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
        self.url = url

    def prepare_headers(self, headers):
        """Prepares the given HTTP headers."""

        self.headers = CaseInsensitiveDict()
        if headers:
            for header in headers.items():
                # Raise exception on invalid header value.
                check_header_validity(header)
                name, value = header
                self.headers[to_native_string(name)] = value

    def prepare_body(self, data, files, json=None):
        """Prepares the given HTTP body data."""

        # Check if file, fo, generator, iterator.
        # If not, run through normal process.

        # Nottin' on you.
        body = None
        content_type = None

        if not data and json is not None:
            # urllib3 requires a bytes-like body. Python 2's json.dumps
            # provides this natively, but Python 3 gives a Unicode string.
            content_type = "application/json"

            try:
                body = complexjson.dumps(json, allow_nan=False)
            except ValueError as ve:
                raise InvalidJSONError(ve, request=self)

            if not isinstance(body, bytes):
                body = body.encode("utf-8")

        is_stream = all(
            [
                hasattr(data, "__iter__"),
                not isinstance(data, (basestring, list, tuple, Mapping)),
            ]
        )

        if is_stream:
            try:
                length = super_len(data)
            except (TypeError, AttributeError, UnsupportedOperation):
                length = None

            body = data

            if getattr(body, "tell", None) is not None:
                # Record the current file position before reading.
                # This will allow us to rewind a file in the event
                # of a redirect.
                try:
                    self._body_position = body.tell()
                except OSError:
                    # This differentiates from None, allowing us to catch
                    # a failed `tell()` later when trying to rewind the body
                    self._body_position = object()

            if files:
                raise NotImplementedError(
                    "Streamed bodies and files are mutually exclusive."
                )

            if length:
                self.headers["Content-Length"] = builtin_str(length)
            else:
                self.headers["Transfer-Encoding"] = "chunked"
        else:
            # Multi-part file uploads.
            if files:
                (body, content_type) = self._encode_files(files, data)
            else:
                if data:
                    body = self._encode_params(data)
                    if isinstance(data, basestring) or hasattr(data, "read"):
                        content_type = None
                    else:
                        content_type = "application/x-www-form-urlencoded"

            self.prepare_content_length(body)

            # Add content-type if it wasn't explicitly provided.
            if content_type and ("content-type" not in self.headers):
                self.headers["Content-Type"] = content_type

        self.body = body

    def prepare_content_length(self, body):
        """Prepare Content-Length header based on request method and body"""
        if body is not None:
            length = super_len(body)
            if length:
                # If length exists, set it. Otherwise, we fallback
                # to Transfer-Encoding: chunked.
                self.headers["Content-Length"] = builtin_str(length)
        elif (
            self.method not in ("GET", "HEAD")
            and self.headers.get("Content-Length") is None
        ):
            # Set Content-Length to 0 for methods that can have a body
            # but don't provide one. (i.e. not GET or HEAD)
            self.headers["Content-Length"] = "0"

    def prepare_auth(self, auth, url=""):
        """Prepares the given HTTP auth data."""

        # If no Auth is explicitly provided, extract it from the URL first.
        if auth is None:
            url_auth = get_auth_from_url(self.url)
            auth = url_auth if any(url_auth) else None

        if auth:
            if isinstance(auth, tuple) and len(auth) == 2:
                # special-case basic HTTP auth
                auth = HTTPBasicAuth(*auth)

            # Allow auth to make its changes.
            r = auth(self)

            # Update self to reflect the auth changes.
            self.__dict__.update(r.__dict__)

            # Recompute Content-Length
            self.prepare_content_length(self.body)

    def prepare_cookies(self, cookies):
        """Prepares the given HTTP cookie data.

        This function eventually generates a ``Cookie`` header from the
        given cookies using cookielib. Due to cookielib's design, the header
        will not be regenerated if it already exists, meaning this function
        can only be called once for the life of the
        :class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
        to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
        header is removed beforehand.
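
        A minimal usage sketch (hypothetical values, not part of the original
        documentation)::

            >>> p = PreparedRequest()
            >>> p.prepare(method="GET", url="https://httpbin.org/get",
            ...           cookies={"session": "abc123"})
            >>> p.headers["Cookie"]
            'session=abc123'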
        """
        if isinstance(cookies, cookielib.CookieJar):
            self._cookies = cookies
        else:
            self._cookies = cookiejar_from_dict(cookies)

        cookie_header = get_cookie_header(self._cookies, self)
        if cookie_header is not None:
            self.headers["Cookie"] = cookie_header

    def prepare_hooks(self, hooks):
        """Prepares the given hooks."""
        # hooks can be passed as None to the prepare method and to this
        # method. To prevent iterating over None, simply use an empty list
        # if hooks is False-y
        hooks = hooks or []
        for event in hooks:
            self.register_hook(event, hooks[event])


class Response:
    """The :class:`Response <Response>` object, which contains a
    server's response to an HTTP request.
    """

    __attrs__ = [
        "_content",
        "status_code",
        "headers",
        "url",
        "history",
        "encoding",
        "reason",
        "cookies",
        "elapsed",
        "request",
    ]

    def __init__(self):
        self._content = False
        self._content_consumed = False
        self._next = None

        #: Integer Code of responded HTTP Status, e.g. 404 or 200.
        self.status_code = None

        #: Case-insensitive Dictionary of Response Headers.
        #: For example, ``headers['content-encoding']`` will return the
        #: value of a ``'Content-Encoding'`` response header.
        self.headers = CaseInsensitiveDict()

        #: File-like object representation of response (for advanced usage).
        #: Use of ``raw`` requires that ``stream=True`` be set on the request.
        #: This requirement does not apply for use internally to Requests.
        self.raw = None

        #: Final URL location of Response.
        self.url = None

        #: Encoding to decode with when accessing r.text.
        self.encoding = None

        #: A list of :class:`Response <Response>` objects from
        #: the history of the Request. Any redirect responses will end
        #: up here. The list is sorted from the oldest to the most recent request.
        self.history = []

        #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
        self.reason = None

        #: A CookieJar of Cookies the server sent back.
        self.cookies = cookiejar_from_dict({})

        #: The amount of time elapsed between sending the request
        #: and the arrival of the response (as a timedelta).
        #: This property specifically measures the time taken between sending
        #: the first byte of the request and finishing parsing the headers. It
        #: is therefore unaffected by consuming the response content or the
        #: value of the ``stream`` keyword argument.
        self.elapsed = datetime.timedelta(0)

        #: The :class:`PreparedRequest <PreparedRequest>` object to which this
        #: is a response.
        self.request = None
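
    # Illustrative sketch (hypothetical call, not part of the original
    # module): Response supports the context-manager protocol, so a streamed
    # response can be released deterministically:
    #
    #     with requests.get("https://httpbin.org/get", stream=True) as r:
    #         for chunk in r.iter_content(chunk_size=8192):
    #             ...  # process chunk
    #
    # __exit__ simply delegates to close() below.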
    def __enter__(self):
        return self

    def __exit__(self, *args):
        self.close()

    def __getstate__(self):
        # Consume everything; accessing the content attribute makes
        # sure the content has been fully read.
        if not self._content_consumed:
            self.content

        return {attr: getattr(self, attr, None) for attr in self.__attrs__}

    def __setstate__(self, state):
        for name, value in state.items():
            setattr(self, name, value)

        # pickled objects do not have .raw
        setattr(self, "_content_consumed", True)
        setattr(self, "raw", None)

    def __repr__(self):
        return f"<Response [{self.status_code}]>"

    def __bool__(self):
        """Returns True if :attr:`status_code` is less than 400.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        """
        return self.ok

    def __nonzero__(self):
        """Returns True if :attr:`status_code` is less than 400.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        """
        return self.ok

    def __iter__(self):
        """Allows you to use a response as an iterator."""
        return self.iter_content(128)

    @property
    def ok(self):
        """Returns True if :attr:`status_code` is less than 400, False if not.

        This attribute checks if the status code of the response is between
        400 and 600 to see if there was a client error or a server error. If
        the status code is between 200 and 400, this will return True. This
        is **not** a check to see if the response code is ``200 OK``.
        """
        try:
            self.raise_for_status()
        except HTTPError:
            return False
        return True

    @property
    def is_redirect(self):
        """True if this Response is a well-formed HTTP redirect that could have
        been processed automatically (by :meth:`Session.resolve_redirects`).
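
        Illustrative sketch (hypothetical request; the exact status depends
        on the server)::

            >>> r = requests.get("https://httpbin.org/redirect/1",
            ...                  allow_redirects=False)
            >>> r.status_code, r.is_redirect
            (302, True)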
        """
        return "location" in self.headers and self.status_code in REDIRECT_STATI

    @property
    def is_permanent_redirect(self):
        """True if this Response is one of the permanent versions of redirect."""
        return "location" in self.headers and self.status_code in (
            codes.moved_permanently,
            codes.permanent_redirect,
        )

    @property
    def next(self):
        """Returns a PreparedRequest for the next request in a redirect chain, if there is one."""
        return self._next

    @property
    def apparent_encoding(self):
        """The apparent encoding, provided by the charset_normalizer or chardet libraries."""
        if chardet is not None:
            return chardet.detect(self.content)["encoding"]
        else:
            # If no character detection library is available, we'll fall back
            # to a standard Python utf-8 str.
            return "utf-8"

    def iter_content(self, chunk_size=1, decode_unicode=False):
        """Iterates over the response data. When stream=True is set on the
        request, this avoids reading the content at once into memory for
        large responses. The chunk size is the number of bytes it should
        read into memory. This is not necessarily the length of each item
        returned as decoding can take place.

        chunk_size must be of type int or None. A value of None will
        function differently depending on the value of `stream`.
        stream=True will read data as it arrives in whatever size the
        chunks are received. If stream=False, data is returned as
        a single chunk.

        If decode_unicode is True, content will be decoded using the best
        available encoding based on the response.
        """

        def generate():
            # Special case for urllib3.
            if hasattr(self.raw, "stream"):
                try:
                    yield from self.raw.stream(chunk_size, decode_content=True)
                except ProtocolError as e:
                    raise ChunkedEncodingError(e)
                except DecodeError as e:
                    raise ContentDecodingError(e)
                except ReadTimeoutError as e:
                    raise ConnectionError(e)
                except SSLError as e:
                    raise RequestsSSLError(e)
            else:
                # Standard file-like object.
                while True:
                    chunk = self.raw.read(chunk_size)
                    if not chunk:
                        break
                    yield chunk

            self._content_consumed = True

        if self._content_consumed and isinstance(self._content, bool):
            raise StreamConsumedError()
        elif chunk_size is not None and not isinstance(chunk_size, int):
            raise TypeError(
                f"chunk_size must be an int, it is instead a {type(chunk_size)}."
            )
        # simulate reading small chunks of the content
        reused_chunks = iter_slices(self._content, chunk_size)

        stream_chunks = generate()

        chunks = reused_chunks if self._content_consumed else stream_chunks

        if decode_unicode:
            chunks = stream_decode_response_unicode(chunks, self)

        return chunks

    def iter_lines(
        self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None
    ):
        """Iterates over the response data, one line at a time. When
        stream=True is set on the request, this avoids reading the
        content at once into memory for large responses.

        .. note:: This method is not reentrant safe.
        """

        pending = None

        for chunk in self.iter_content(
            chunk_size=chunk_size, decode_unicode=decode_unicode
        ):
            if pending is not None:
                chunk = pending + chunk

            if delimiter:
                lines = chunk.split(delimiter)
            else:
                lines = chunk.splitlines()

            if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
                pending = lines.pop()
            else:
                pending = None

            yield from lines

        if pending is not None:
            yield pending

    @property
    def content(self):
        """Content of the response, in bytes."""

        if self._content is False:
            # Read the contents.
            if self._content_consumed:
                raise RuntimeError("The content for this response was already consumed")

            if self.status_code == 0 or self.raw is None:
                self._content = None
            else:
                self._content = b"".join(self.iter_content(CONTENT_CHUNK_SIZE)) or b""

        self._content_consumed = True
        # don't need to release the connection; that's been handled by urllib3
        # since we exhausted the data.
        return self._content

    @property
    def text(self):
        """Content of the response, in unicode.

        If Response.encoding is None, encoding will be guessed using
        ``charset_normalizer`` or ``chardet``.

        The encoding of the response content is determined based solely on HTTP
        headers, following RFC 2616 to the letter. If you can take advantage of
        non-HTTP knowledge to make a better guess at the encoding, you should
        set ``r.encoding`` appropriately before accessing this property.
        """

        # Try charset from content-type
        content = None
        encoding = self.encoding

        if not self.content:
            return ""

        # Fallback to auto-detected encoding.
        if self.encoding is None:
            encoding = self.apparent_encoding

        # Decode unicode from given encoding.
        try:
            content = str(self.content, encoding, errors="replace")
        except (LookupError, TypeError):
            # A LookupError is raised if the encoding was not found which could
            # indicate a misspelling or similar mistake.
            #
            # A TypeError can be raised if encoding is None
            #
            # So we try blindly encoding.
            content = str(self.content, errors="replace")

        return content

    def json(self, **kwargs):
        r"""Returns the json-encoded content of a response, if any.

        :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
        :raises requests.exceptions.JSONDecodeError: If the response body does not
            contain valid json.
        """

        if not self.encoding and self.content and len(self.content) > 3:
            # No encoding set. JSON RFC 4627 section 3 states we should expect
            # UTF-8, -16 or -32. Detect which one to use; If the detection or
            # decoding fails, fall back to `self.text` (using charset_normalizer to make
            # a best guess).
            encoding = guess_json_utf(self.content)
            if encoding is not None:
                try:
                    return complexjson.loads(self.content.decode(encoding), **kwargs)
                except UnicodeDecodeError:
                    # Wrong UTF codec detected; usually because it's not UTF-8
                    # but some other 8-bit codec. This is an RFC violation,
                    # and the server didn't bother to tell us what codec *was*
                    # used.
                    pass
                except JSONDecodeError as e:
                    raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)

        try:
            return complexjson.loads(self.text, **kwargs)
        except JSONDecodeError as e:
            # Catch JSON-related errors and raise as requests.JSONDecodeError
            # This aliases json.JSONDecodeError and simplejson.JSONDecodeError
            raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)

    @property
    def links(self):
        """Returns the parsed header links of the response, if any."""

        header = self.headers.get("link")

        resolved_links = {}

        if header:
            links = parse_header_links(header)

            for link in links:
                key = link.get("rel") or link.get("url")
                resolved_links[key] = link

        return resolved_links

    def raise_for_status(self):
        """Raises :class:`HTTPError`, if one occurred."""

        http_error_msg = ""
        if isinstance(self.reason, bytes):
            # We attempt to decode utf-8 first because some servers
            # choose to localize their reason strings. If the string
            # isn't utf-8, we fall back to iso-8859-1 for all other
            # encodings. (See PR #3538)
            try:
                reason = self.reason.decode("utf-8")
            except UnicodeDecodeError:
                reason = self.reason.decode("iso-8859-1")
        else:
            reason = self.reason

        if 400 <= self.status_code < 500:
            http_error_msg = (
                f"{self.status_code} Client Error: {reason} for url: {self.url}"
            )

        elif 500 <= self.status_code < 600:
            http_error_msg = (
                f"{self.status_code} Server Error: {reason} for url: {self.url}"
            )

        if http_error_msg:
            raise HTTPError(http_error_msg, response=self)
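
    # Illustrative sketch (hypothetical status codes, not part of the original
    # module): the idiomatic pattern is
    #
    #     r = requests.get("https://httpbin.org/status/404")
    #     r.raise_for_status()   # raises requests.HTTPError: 404 Client Error ...
    #
    # whereas a 2xx or 3xx response returns None and execution continues.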
    def close(self):
        """Releases the connection back to the pool. Once this method has been
        called the underlying ``raw`` object must not be accessed again.

        *Note: Should not normally need to be called explicitly.*
        """
        if not self._content_consumed:
            self.raw.close()

        release_conn = getattr(self.raw, "release_conn", None)
        if release_conn is not None:
            release_conn()