annotate urllib3/response.py @ 15:0a3943480712

planemo upload for repository https://toolrepo.galaxytrakr.org/view/jpayne/bioproject_to_srr_2/556cac4fb538
author jpayne
date Tue, 21 May 2024 01:05:30 -0400
parents 5eb2d5e3bf22
children
rev   line source
jpayne@7 1 from __future__ import annotations
jpayne@7 2
jpayne@7 3 import collections
jpayne@7 4 import io
jpayne@7 5 import json as _json
jpayne@7 6 import logging
jpayne@7 7 import re
jpayne@7 8 import sys
jpayne@7 9 import typing
jpayne@7 10 import warnings
jpayne@7 11 import zlib
jpayne@7 12 from contextlib import contextmanager
jpayne@7 13 from http.client import HTTPMessage as _HttplibHTTPMessage
jpayne@7 14 from http.client import HTTPResponse as _HttplibHTTPResponse
jpayne@7 15 from socket import timeout as SocketTimeout
jpayne@7 16
jpayne@7 17 if typing.TYPE_CHECKING:
jpayne@7 18 from ._base_connection import BaseHTTPConnection
jpayne@7 19
jpayne@7 20 try:
jpayne@7 21 try:
jpayne@7 22 import brotlicffi as brotli # type: ignore[import-not-found]
jpayne@7 23 except ImportError:
jpayne@7 24 import brotli # type: ignore[import-not-found]
jpayne@7 25 except ImportError:
jpayne@7 26 brotli = None
jpayne@7 27
try:
    import zstandard as zstd  # type: ignore[import-not-found]

    # The package 'zstandard' added the 'eof' property starting
    # in v0.18.0 which we require to ensure a complete and
    # valid zstd stream was fed into the ZstdDecoder.
    # See: https://github.com/urllib3/urllib3/pull/2624
    # (Fixed: the original had a duplicated assignment
    # `_zstd_version = _zstd_version = tuple(...)`.)
    _zstd_version = tuple(
        map(int, re.search(r"^([0-9]+)\.([0-9]+)", zstd.__version__).groups())  # type: ignore[union-attr]
    )
    if _zstd_version < (0, 18):  # Defensive:
        zstd = None

except (AttributeError, ImportError, ValueError):  # Defensive:
    zstd = None
jpayne@7 43
jpayne@7 44 from . import util
jpayne@7 45 from ._base_connection import _TYPE_BODY
jpayne@7 46 from ._collections import HTTPHeaderDict
jpayne@7 47 from .connection import BaseSSLError, HTTPConnection, HTTPException
jpayne@7 48 from .exceptions import (
jpayne@7 49 BodyNotHttplibCompatible,
jpayne@7 50 DecodeError,
jpayne@7 51 HTTPError,
jpayne@7 52 IncompleteRead,
jpayne@7 53 InvalidChunkLength,
jpayne@7 54 InvalidHeader,
jpayne@7 55 ProtocolError,
jpayne@7 56 ReadTimeoutError,
jpayne@7 57 ResponseNotChunked,
jpayne@7 58 SSLError,
jpayne@7 59 )
jpayne@7 60 from .util.response import is_fp_closed, is_response_to_head
jpayne@7 61 from .util.retry import Retry
jpayne@7 62
jpayne@7 63 if typing.TYPE_CHECKING:
jpayne@7 64 from typing import Literal
jpayne@7 65
jpayne@7 66 from .connectionpool import HTTPConnectionPool
jpayne@7 67
jpayne@7 68 log = logging.getLogger(__name__)
jpayne@7 69
jpayne@7 70
class ContentDecoder:
    """Abstract interface for streaming content-encoding decoders."""

    def decompress(self, data: bytes) -> bytes:
        """Feed *data* into the decoder and return any bytes decoded so far."""
        raise NotImplementedError()

    def flush(self) -> bytes:
        """Finalize the stream and return any remaining decoded bytes."""
        raise NotImplementedError()
jpayne@7 77
jpayne@7 78
class DeflateDecoder(ContentDecoder):
    """Decoder for the 'deflate' content-encoding.

    Some servers send a raw DEFLATE stream instead of the zlib-wrapped
    format. We first try zlib framing and, on failure, replay the bytes
    buffered so far through a raw-DEFLATE decompressor.
    """

    def __init__(self) -> None:
        self._first_try = True  # still probing zlib vs. raw deflate
        self._data = b""  # bytes buffered while probing
        self._obj = zlib.decompressobj()

    def decompress(self, data: bytes) -> bytes:
        if not data:
            return data

        if not self._first_try:
            # Format already settled; stream straight through.
            return self._obj.decompress(data)

        # Keep a copy so everything can be replayed as raw deflate.
        self._data += data
        try:
            decompressed = self._obj.decompress(data)
        except zlib.error:
            # zlib framing failed: retry buffered bytes as raw DEFLATE.
            self._first_try = False
            self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
            try:
                return self.decompress(self._data)
            finally:
                self._data = None  # type: ignore[assignment]

        if decompressed:
            # First real output: zlib framing confirmed, drop the buffer.
            self._first_try = False
            self._data = None  # type: ignore[assignment]
        return decompressed

    def flush(self) -> bytes:
        return self._obj.flush()
jpayne@7 109
jpayne@7 110
class GzipDecoderState:
    """State values for :class:`GzipDecoder`'s tolerance of trailing garbage."""

    # Still decoding the first gzip member; errors here are fatal.
    FIRST_MEMBER = 0
    # At least one member fully decoded; errors are treated as trailing garbage.
    OTHER_MEMBERS = 1
    # An error occurred; all further input is silently discarded.
    SWALLOW_DATA = 2
jpayne@7 115
jpayne@7 116
class GzipDecoder(ContentDecoder):
    """Decoder for 'gzip' content-encoding, including multi-member streams."""

    def __init__(self) -> None:
        self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
        self._state = GzipDecoderState.FIRST_MEMBER

    def decompress(self, data: bytes) -> bytes:
        output = bytearray()
        if not data or self._state == GzipDecoderState.SWALLOW_DATA:
            return bytes(output)
        while True:
            try:
                output += self._obj.decompress(data)
            except zlib.error:
                was_first = self._state == GzipDecoderState.FIRST_MEMBER
                # Ignore data after the first error.
                self._state = GzipDecoderState.SWALLOW_DATA
                if not was_first:
                    # Trailing garbage after a complete member is tolerated,
                    # matching the behaviour of other gzip clients.
                    return bytes(output)
                raise
            data = self._obj.unused_data
            if not data:
                return bytes(output)
            # Another gzip member follows; start a fresh decompressor.
            self._state = GzipDecoderState.OTHER_MEMBERS
            self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)

    def flush(self) -> bytes:
        return self._obj.flush()
jpayne@7 145
jpayne@7 146
if brotli is not None:

    class BrotliDecoder(ContentDecoder):
        # Supports both 'brotlipy' and 'Brotli' packages
        # since they share an import name. The top branches
        # are for 'brotlipy' and bottom branches for 'Brotli'
        def __init__(self) -> None:
            self._obj = brotli.Decompressor()
            # Bind the appropriate streaming method directly onto the
            # instance: 'brotlipy' exposes decompress(), 'Brotli' process().
            if hasattr(self._obj, "decompress"):
                setattr(self, "decompress", self._obj.decompress)
            else:
                setattr(self, "decompress", self._obj.process)

        def flush(self) -> bytes:
            # Only 'brotlipy' has flush(); 'Brotli' holds no extra state.
            if hasattr(self._obj, "flush"):
                return self._obj.flush()  # type: ignore[no-any-return]
            return b""
jpayne@7 164
jpayne@7 165
if zstd is not None:

    class ZstdDecoder(ContentDecoder):
        def __init__(self) -> None:
            self._obj = zstd.ZstdDecompressor().decompressobj()

        def decompress(self, data: bytes) -> bytes:
            """Decode *data*, transparently handling multi-frame streams."""
            if not data:
                return b""
            data_parts = [self._obj.decompress(data)]
            # A zstd body may contain several frames; once a frame ends
            # (eof) restart with a fresh decompressor on the leftover bytes.
            while self._obj.eof and self._obj.unused_data:
                unused_data = self._obj.unused_data
                self._obj = zstd.ZstdDecompressor().decompressobj()
                data_parts.append(self._obj.decompress(unused_data))
            return b"".join(data_parts)

        def flush(self) -> bytes:
            ret = self._obj.flush()  # note: this is a no-op
            if not self._obj.eof:
                # Stream ended mid-frame: the response body was truncated.
                raise DecodeError("Zstandard data is incomplete")
            return ret  # type: ignore[no-any-return]
jpayne@7 187
jpayne@7 188
class MultiDecoder(ContentDecoder):
    """
    Decoder for a comma-separated chain of content codings.

    From RFC7231:
        If one or more encodings have been applied to a representation, the
        sender that applied the encodings MUST generate a Content-Encoding
        header field that lists the content codings in the order in which
        they were applied.
    """

    def __init__(self, modes: str) -> None:
        # One decoder per listed coding, kept in application order.
        self._decoders = [_get_decoder(mode.strip()) for mode in modes.split(",")]

    def flush(self) -> bytes:
        # Only the first-applied (innermost) coding can hold buffered output.
        return self._decoders[0].flush()

    def decompress(self, data: bytes) -> bytes:
        # Undo the codings in reverse order of application.
        for decoder in reversed(self._decoders):
            data = decoder.decompress(data)
        return data
jpayne@7 208
jpayne@7 209
def _get_decoder(mode: str) -> ContentDecoder:
    """Return a ContentDecoder matching the given content-encoding token(s)."""
    if "," in mode:
        return MultiDecoder(mode)

    # According to RFC 9110 section 8.4.1.3, recipients should
    # consider x-gzip equivalent to gzip
    if mode in ("gzip", "x-gzip"):
        return GzipDecoder()

    if brotli is not None and mode == "br":
        return BrotliDecoder()

    if zstd is not None and mode == "zstd":
        return ZstdDecoder()

    # Anything else is treated as deflate.
    return DeflateDecoder()
jpayne@7 226
jpayne@7 227
class BytesQueueBuffer:
    """Memory-efficient FIFO buffer of byte chunks.

    To return decoded data in read() and still follow the BufferedIOBase API,
    we need a buffer that can always hand back exactly the requested number
    of bytes. Fill it with put(); drain it with get() / get_all().

    Peak memory usage is bounded by the buffered data itself plus the largest
    single chunk copied inside get(); with one huge chunk the worst case is a
    full copy of the data.
    """

    def __init__(self) -> None:
        self.buffer: typing.Deque[bytes] = collections.deque()
        self._size: int = 0

    def __len__(self) -> int:
        return self._size

    def put(self, data: bytes) -> None:
        """Append a chunk to the end of the buffer."""
        self.buffer.append(data)
        self._size += len(data)

    def get(self, n: int) -> bytes:
        """Remove and return up to *n* bytes from the front of the buffer."""
        if n == 0:
            return b""
        if not self.buffer:
            raise RuntimeError("buffer is empty")
        if n < 0:
            raise ValueError("n should be > 0")

        out = io.BytesIO()
        copied = 0
        while copied < n and self.buffer:
            chunk = self.buffer.popleft()
            needed = n - copied
            if len(chunk) > needed:
                # Chunk straddles the boundary: keep the tail for later.
                out.write(chunk[:needed])
                self.buffer.appendleft(chunk[needed:])
                self._size -= needed
                copied += needed
            else:
                out.write(chunk)
                self._size -= len(chunk)
                copied += len(chunk)

        return out.getvalue()

    def get_all(self) -> bytes:
        """Remove and return the entire buffered content as one bytes object."""
        chunks = self.buffer
        if not chunks:
            assert self._size == 0
            return b""
        if len(chunks) == 1:
            # Single chunk: hand it back without copying.
            result = chunks.pop()
        else:
            out = io.BytesIO()
            out.writelines(chunks.popleft() for _ in range(len(chunks)))
            result = out.getvalue()
        self._size = 0
        return result
jpayne@7 299
jpayne@7 300
class BaseHTTPResponse(io.IOBase):
    """Shared behaviour for urllib3 response objects.

    Handles header storage, redirect detection, content decoding
    (gzip/deflate plus brotli/zstd when available) and the compatibility
    shims for :mod:`io` and :class:`http.client.HTTPResponse`. The
    body-reading methods are abstract and implemented by subclasses.
    """

    # Content-encodings this build can decode transparently.
    CONTENT_DECODERS = ["gzip", "x-gzip", "deflate"]
    if brotli is not None:
        CONTENT_DECODERS += ["br"]
    if zstd is not None:
        CONTENT_DECODERS += ["zstd"]
    REDIRECT_STATUSES = [301, 302, 303, 307, 308]

    # Low-level decoder exceptions that _decode() wraps into DecodeError.
    DECODER_ERROR_CLASSES: tuple[type[Exception], ...] = (IOError, zlib.error)
    if brotli is not None:
        DECODER_ERROR_CLASSES += (brotli.error,)

    if zstd is not None:
        DECODER_ERROR_CLASSES += (zstd.ZstdError,)

    def __init__(
        self,
        *,
        headers: typing.Mapping[str, str] | typing.Mapping[bytes, bytes] | None = None,
        status: int,
        version: int,
        reason: str | None,
        decode_content: bool,
        request_url: str | None,
        retries: Retry | None = None,
    ) -> None:
        # Reuse an HTTPHeaderDict as-is; wrap anything else.
        if isinstance(headers, HTTPHeaderDict):
            self.headers = headers
        else:
            self.headers = HTTPHeaderDict(headers)  # type: ignore[arg-type]
        self.status = status
        self.version = version
        self.reason = reason
        self.decode_content = decode_content
        # Set once decoded output has been produced; _decode() uses it to
        # reject a later read(decode_content=False) on the same stream.
        self._has_decoded_content = False
        self._request_url: str | None = request_url
        self.retries = retries

        self.chunked = False
        tr_enc = self.headers.get("transfer-encoding", "").lower()
        # Don't incur the penalty of creating a list and then discarding it
        encodings = (enc.strip() for enc in tr_enc.split(","))
        if "chunked" in encodings:
            self.chunked = True

        # Created lazily by _init_decoder() from the content-encoding header.
        self._decoder: ContentDecoder | None = None
        # Declared here; subclasses assign the actual value.
        self.length_remaining: int | None

    def get_redirect_location(self) -> str | None | Literal[False]:
        """
        Should we redirect and where to?

        :returns: Truthy redirect location string if we got a redirect status
            code and valid location. ``None`` if redirect status and no
            location. ``False`` if not a redirect status code.
        """
        if self.status in self.REDIRECT_STATUSES:
            return self.headers.get("location")
        return False

    @property
    def data(self) -> bytes:
        """The full (possibly decoded) response body; subclasses implement."""
        raise NotImplementedError()

    def json(self) -> typing.Any:
        """
        Parses the body of the HTTP response as JSON.

        To use a custom JSON decoder pass the result of :attr:`HTTPResponse.data` to the decoder.

        This method can raise either `UnicodeDecodeError` or `json.JSONDecodeError`.

        Read more :ref:`here <json>`.
        """
        data = self.data.decode("utf-8")
        return _json.loads(data)

    @property
    def url(self) -> str | None:
        """URL the response was fetched from; subclasses implement."""
        raise NotImplementedError()

    @url.setter
    def url(self, url: str | None) -> None:
        raise NotImplementedError()

    @property
    def connection(self) -> BaseHTTPConnection | None:
        """The underlying connection, if still held; subclasses implement."""
        raise NotImplementedError()

    @property
    def retries(self) -> Retry | None:
        return self._retries

    @retries.setter
    def retries(self, retries: Retry | None) -> None:
        # Override the request_url if retries has a redirect location.
        if retries is not None and retries.history:
            self.url = retries.history[-1].redirect_location
        self._retries = retries

    def stream(
        self, amt: int | None = 2**16, decode_content: bool | None = None
    ) -> typing.Iterator[bytes]:
        """Iterate over the body in chunks; subclasses implement."""
        raise NotImplementedError()

    def read(
        self,
        amt: int | None = None,
        decode_content: bool | None = None,
        cache_content: bool = False,
    ) -> bytes:
        """Read up to ``amt`` bytes of the body; subclasses implement."""
        raise NotImplementedError()

    def read1(
        self,
        amt: int | None = None,
        decode_content: bool | None = None,
    ) -> bytes:
        """Read with at most one underlying read call; subclasses implement."""
        raise NotImplementedError()

    def read_chunked(
        self,
        amt: int | None = None,
        decode_content: bool | None = None,
    ) -> typing.Iterator[bytes]:
        """Iterate over chunked transfer-encoding chunks; subclasses implement."""
        raise NotImplementedError()

    def release_conn(self) -> None:
        """Return the connection to its pool; subclasses implement."""
        raise NotImplementedError()

    def drain_conn(self) -> None:
        """Consume remaining body so the connection can be reused; subclasses implement."""
        raise NotImplementedError()

    def close(self) -> None:
        raise NotImplementedError()

    def _init_decoder(self) -> None:
        """
        Set-up the _decoder attribute if necessary.
        """
        # Note: content-encoding value should be case-insensitive, per RFC 7230
        # Section 3.2
        content_encoding = self.headers.get("content-encoding", "").lower()
        if self._decoder is None:
            if content_encoding in self.CONTENT_DECODERS:
                self._decoder = _get_decoder(content_encoding)
            elif "," in content_encoding:
                # Multiple codings: only build a decoder chain if every
                # listed (non-empty) coding is one we support.
                encodings = [
                    e.strip()
                    for e in content_encoding.split(",")
                    if e.strip() in self.CONTENT_DECODERS
                ]
                if encodings:
                    self._decoder = _get_decoder(content_encoding)

    def _decode(
        self, data: bytes, decode_content: bool | None, flush_decoder: bool
    ) -> bytes:
        """
        Decode the data passed in and potentially flush the decoder.
        """
        if not decode_content:
            # Refuse to hand out raw bytes after decoded bytes have already
            # been produced -- the stream position would be meaningless.
            if self._has_decoded_content:
                raise RuntimeError(
                    "Calling read(decode_content=False) is not supported after "
                    "read(decode_content=True) was called."
                )
            return data

        try:
            if self._decoder:
                data = self._decoder.decompress(data)
                self._has_decoded_content = True
        except self.DECODER_ERROR_CLASSES as e:
            content_encoding = self.headers.get("content-encoding", "").lower()
            raise DecodeError(
                "Received response with content-encoding: %s, but "
                "failed to decode it." % content_encoding,
                e,
            ) from e
        if flush_decoder:
            data += self._flush_decoder()

        return data

    def _flush_decoder(self) -> bytes:
        """
        Flushes the decoder. Should only be called if the decoder is actually
        being used.
        """
        if self._decoder:
            return self._decoder.decompress(b"") + self._decoder.flush()
        return b""

    # Compatibility methods for `io` module
    def readinto(self, b: bytearray) -> int:
        """Read into *b*; returns the number of bytes placed (0 at EOF)."""
        temp = self.read(len(b))
        if len(temp) == 0:
            return 0
        else:
            b[: len(temp)] = temp
            return len(temp)

    # Compatibility methods for http.client.HTTPResponse
    def getheaders(self) -> HTTPHeaderDict:
        warnings.warn(
            "HTTPResponse.getheaders() is deprecated and will be removed "
            "in urllib3 v2.1.0. Instead access HTTPResponse.headers directly.",
            category=DeprecationWarning,
            stacklevel=2,
        )
        return self.headers

    def getheader(self, name: str, default: str | None = None) -> str | None:
        warnings.warn(
            "HTTPResponse.getheader() is deprecated and will be removed "
            "in urllib3 v2.1.0. Instead use HTTPResponse.headers.get(name, default).",
            category=DeprecationWarning,
            stacklevel=2,
        )
        return self.headers.get(name, default)

    # Compatibility method for http.cookiejar
    def info(self) -> HTTPHeaderDict:
        return self.headers

    def geturl(self) -> str | None:
        return self.url
jpayne@7 529
jpayne@7 530
jpayne@7 531 class HTTPResponse(BaseHTTPResponse):
jpayne@7 532 """
jpayne@7 533 HTTP Response container.
jpayne@7 534
jpayne@7 535 Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is
jpayne@7 536 loaded and decoded on-demand when the ``data`` property is accessed. This
jpayne@7 537 class is also compatible with the Python standard library's :mod:`io`
jpayne@7 538 module, and can hence be treated as a readable object in the context of that
jpayne@7 539 framework.
jpayne@7 540
jpayne@7 541 Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`:
jpayne@7 542
jpayne@7 543 :param preload_content:
jpayne@7 544 If True, the response's body will be preloaded during construction.
jpayne@7 545
jpayne@7 546 :param decode_content:
jpayne@7 547 If True, will attempt to decode the body based on the
jpayne@7 548 'content-encoding' header.
jpayne@7 549
jpayne@7 550 :param original_response:
jpayne@7 551 When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse`
jpayne@7 552 object, it's convenient to include the original for debug purposes. It's
jpayne@7 553 otherwise unused.
jpayne@7 554
jpayne@7 555 :param retries:
jpayne@7 556 The retries contains the last :class:`~urllib3.util.retry.Retry` that
jpayne@7 557 was used during the request.
jpayne@7 558
jpayne@7 559 :param enforce_content_length:
jpayne@7 560 Enforce content length checking. Body returned by server must match
jpayne@7 561 value of Content-Length header, if present. Otherwise, raise error.
jpayne@7 562 """
jpayne@7 563
    def __init__(
        self,
        body: _TYPE_BODY = "",
        headers: typing.Mapping[str, str] | typing.Mapping[bytes, bytes] | None = None,
        status: int = 0,
        version: int = 0,
        reason: str | None = None,
        preload_content: bool = True,
        decode_content: bool = True,
        original_response: _HttplibHTTPResponse | None = None,
        pool: HTTPConnectionPool | None = None,
        connection: HTTPConnection | None = None,
        msg: _HttplibHTTPMessage | None = None,
        retries: Retry | None = None,
        enforce_content_length: bool = True,
        request_method: str | None = None,
        request_url: str | None = None,
        auto_close: bool = True,
    ) -> None:
        super().__init__(
            headers=headers,
            status=status,
            version=version,
            reason=reason,
            decode_content=decode_content,
            request_url=request_url,
            retries=retries,
        )

        self.enforce_content_length = enforce_content_length
        self.auto_close = auto_close

        self._body = None
        self._fp: _HttplibHTTPResponse | None = None
        self._original_response = original_response
        # Raw bytes pulled off the wire so far (reported by tell()).
        self._fp_bytes_read = 0
        self.msg = msg

        # A str/bytes body is stored directly; file-like bodies are
        # streamed through self._fp below.
        if body and isinstance(body, (str, bytes)):
            self._body = body

        self._pool = pool
        self._connection = connection

        if hasattr(body, "read"):
            self._fp = body  # type: ignore[assignment]

        # Are we using the chunked-style of transfer encoding?
        self.chunk_left: int | None = None

        # Determine length of response
        self.length_remaining = self._init_length(request_method)

        # Used to return the correct amount of bytes for partial read()s
        self._decoded_buffer = BytesQueueBuffer()

        # If requested, preload the body.
        if preload_content and not self._body:
            self._body = self.read(decode_content=decode_content)
jpayne@7 623
jpayne@7 624 def release_conn(self) -> None:
jpayne@7 625 if not self._pool or not self._connection:
jpayne@7 626 return None
jpayne@7 627
jpayne@7 628 self._pool._put_conn(self._connection)
jpayne@7 629 self._connection = None
jpayne@7 630
    def drain_conn(self) -> None:
        """
        Read and discard any remaining HTTP response data in the response connection.

        Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
        """
        try:
            self.read()
        except (HTTPError, OSError, BaseSSLError, HTTPException):
            # Best-effort drain: failures just mean the connection
            # cannot be reused and will be discarded instead.
            pass
jpayne@7 641
    @property
    def data(self) -> bytes:
        """The response body, reading and caching it on first access."""
        # For backwards-compat with earlier urllib3 0.4 and earlier.
        if self._body:
            return self._body  # type: ignore[return-value]

        if self._fp:
            # cache_content=True stores the result in self._body for reuse.
            return self.read(cache_content=True)

        return None  # type: ignore[return-value]
jpayne@7 652
    @property
    def connection(self) -> HTTPConnection | None:
        """The underlying connection, or None once released."""
        return self._connection
jpayne@7 656
    def isclosed(self) -> bool:
        """True if the underlying file-like body is closed (or absent)."""
        return is_fp_closed(self._fp)
jpayne@7 659
    def tell(self) -> int:
        """
        Obtain the number of bytes pulled over the wire so far. May differ from
        the amount of content returned by :meth:``urllib3.response.HTTPResponse.read``
        if bytes are encoded on the wire (e.g, compressed).
        """
        return self._fp_bytes_read
jpayne@7 667
    def _init_length(self, request_method: str | None) -> int | None:
        """
        Set initial length value for Response content if available.

        Returns the expected body length in bytes, or None when it cannot
        be determined (chunked, invalid or absent Content-Length).
        """
        length: int | None
        content_length: str | None = self.headers.get("content-length")

        if content_length is not None:
            if self.chunked:
                # This Response will fail with an IncompleteRead if it can't be
                # received as chunked. This method falls back to attempt reading
                # the response before raising an exception.
                log.warning(
                    "Received response with both Content-Length and "
                    "Transfer-Encoding set. This is expressly forbidden "
                    "by RFC 7230 sec 3.3.2. Ignoring Content-Length and "
                    "attempting to process response as Transfer-Encoding: "
                    "chunked."
                )
                return None

            try:
                # RFC 7230 section 3.3.2 specifies multiple content lengths can
                # be sent in a single Content-Length header
                # (e.g. Content-Length: 42, 42). This line ensures the values
                # are all valid ints and that as long as the `set` length is 1,
                # all values are the same. Otherwise, the header is invalid.
                lengths = {int(val) for val in content_length.split(",")}
                if len(lengths) > 1:
                    raise InvalidHeader(
                        "Content-Length contained multiple "
                        "unmatching values (%s)" % content_length
                    )
                length = lengths.pop()
            except ValueError:
                length = None
            else:
                if length < 0:
                    # Negative lengths are nonsensical; treat as unknown.
                    length = None

        else:  # if content_length is None
            length = None

        # Convert status to int for comparison
        # In some cases, httplib returns a status of "_UNKNOWN"
        try:
            status = int(self.status)
        except ValueError:
            status = 0

        # Check for responses that shouldn't include a body
        if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD":
            length = 0

        return length
jpayne@7 723
    @contextmanager
    def _error_catcher(self) -> typing.Generator[None, None, None]:
        """
        Catch low-level python exceptions, instead re-raising urllib3
        variants, so that low-level exceptions are not leaked in the
        high-level api.

        On exit, release the connection back to the pool.
        """
        clean_exit = False

        try:
            try:
                yield

            except SocketTimeout as e:
                # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
                # there is yet no clean way to get at it from this context.
                raise ReadTimeoutError(self._pool, None, "Read timed out.") from e  # type: ignore[arg-type]

            except BaseSSLError as e:
                # FIXME: Is there a better way to differentiate between SSLErrors?
                if "read operation timed out" not in str(e):
                    # SSL errors related to framing/MAC get wrapped and reraised here
                    raise SSLError(e) from e

                raise ReadTimeoutError(self._pool, None, "Read timed out.") from e  # type: ignore[arg-type]

            except IncompleteRead as e:
                # expected == -partial marks the "no content expected" case;
                # report it with a clearer message than a generic broken read.
                if (
                    e.expected is not None
                    and e.partial is not None
                    and e.expected == -e.partial
                ):
                    arg = "Response may not contain content."
                else:
                    arg = f"Connection broken: {e!r}"
                raise ProtocolError(arg, e) from e

            except (HTTPException, OSError) as e:
                raise ProtocolError(f"Connection broken: {e!r}", e) from e

            # If no exception is thrown, we should avoid cleaning up
            # unnecessarily.
            clean_exit = True
        finally:
            # If we didn't terminate cleanly, we need to throw away our
            # connection.
            if not clean_exit:
                # The response may not be closed but we're not going to use it
                # anymore so close it now to ensure that the connection is
                # released back to the pool.
                if self._original_response:
                    self._original_response.close()

                # Closing the response may not actually be sufficient to close
                # everything, so if we have a hold of the connection close that
                # too.
                if self._connection:
                    self._connection.close()

            # If we hold the original response but it's closed now, we should
            # return the connection back to the pool.
            if self._original_response and self._original_response.isclosed():
                self.release_conn()
jpayne@7 789
    def _fp_read(
        self,
        amt: int | None = None,
        *,
        read1: bool = False,
    ) -> bytes:
        """
        Read a response with the thought that reading the number of bytes
        larger than can fit in a 32-bit int at a time via SSL in some
        known cases leads to an overflow error that has to be prevented
        if `amt` or `self.length_remaining` indicate that a problem may
        happen.

        The known cases:
          * 3.8 <= CPython < 3.9.7 because of a bug
            https://github.com/urllib3/urllib3/issues/2513#issuecomment-1152559900.
          * urllib3 injected with pyOpenSSL-backed SSL-support.
          * CPython < 3.10 only when `amt` does not fit 32-bit int.
        """
        assert self._fp
        c_int_max = 2**31 - 1
        # Only chunk the read when a >2GiB read is possible AND we are on an
        # affected SSL/Python combination; otherwise read directly.
        if (
            (amt and amt > c_int_max)
            or (
                amt is None
                and self.length_remaining
                and self.length_remaining > c_int_max
            )
        ) and (util.IS_PYOPENSSL or sys.version_info < (3, 10)):
            if read1:
                # read1 may return less than asked; one capped call suffices.
                return self._fp.read1(c_int_max)
            buffer = io.BytesIO()
            # Besides `max_chunk_amt` being a maximum chunk size, it
            # affects memory overhead of reading a response by this
            # method in CPython.
            # `c_int_max` equal to 2 GiB - 1 byte is the actual maximum
            # chunk size that does not lead to an overflow error, but
            # 256 MiB is a compromise.
            max_chunk_amt = 2**28
            while amt is None or amt != 0:
                if amt is not None:
                    chunk_amt = min(amt, max_chunk_amt)
                    amt -= chunk_amt
                else:
                    chunk_amt = max_chunk_amt
                data = self._fp.read(chunk_amt)
                if not data:
                    break
                buffer.write(data)
                del data  # to reduce peak memory usage by `max_chunk_amt`.
            return buffer.getvalue()
        elif read1:
            return self._fp.read1(amt) if amt is not None else self._fp.read1()
        else:
            # StringIO doesn't like amt=None
            return self._fp.read(amt) if amt is not None else self._fp.read()
jpayne@7 846
jpayne@7 847 def _raw_read(
jpayne@7 848 self,
jpayne@7 849 amt: int | None = None,
jpayne@7 850 *,
jpayne@7 851 read1: bool = False,
jpayne@7 852 ) -> bytes:
jpayne@7 853 """
jpayne@7 854 Reads `amt` of bytes from the socket.
jpayne@7 855 """
jpayne@7 856 if self._fp is None:
jpayne@7 857 return None # type: ignore[return-value]
jpayne@7 858
jpayne@7 859 fp_closed = getattr(self._fp, "closed", False)
jpayne@7 860
jpayne@7 861 with self._error_catcher():
jpayne@7 862 data = self._fp_read(amt, read1=read1) if not fp_closed else b""
jpayne@7 863 if amt is not None and amt != 0 and not data:
jpayne@7 864 # Platform-specific: Buggy versions of Python.
jpayne@7 865 # Close the connection when no data is returned
jpayne@7 866 #
jpayne@7 867 # This is redundant to what httplib/http.client _should_
jpayne@7 868 # already do. However, versions of python released before
jpayne@7 869 # December 15, 2012 (http://bugs.python.org/issue16298) do
jpayne@7 870 # not properly close the connection in all cases. There is
jpayne@7 871 # no harm in redundantly calling close.
jpayne@7 872 self._fp.close()
jpayne@7 873 if (
jpayne@7 874 self.enforce_content_length
jpayne@7 875 and self.length_remaining is not None
jpayne@7 876 and self.length_remaining != 0
jpayne@7 877 ):
jpayne@7 878 # This is an edge case that httplib failed to cover due
jpayne@7 879 # to concerns of backward compatibility. We're
jpayne@7 880 # addressing it here to make sure IncompleteRead is
jpayne@7 881 # raised during streaming, so all calls with incorrect
jpayne@7 882 # Content-Length are caught.
jpayne@7 883 raise IncompleteRead(self._fp_bytes_read, self.length_remaining)
jpayne@7 884 elif read1 and (
jpayne@7 885 (amt != 0 and not data) or self.length_remaining == len(data)
jpayne@7 886 ):
jpayne@7 887 # All data has been read, but `self._fp.read1` in
jpayne@7 888 # CPython 3.12 and older doesn't always close
jpayne@7 889 # `http.client.HTTPResponse`, so we close it here.
jpayne@7 890 # See https://github.com/python/cpython/issues/113199
jpayne@7 891 self._fp.close()
jpayne@7 892
jpayne@7 893 if data:
jpayne@7 894 self._fp_bytes_read += len(data)
jpayne@7 895 if self.length_remaining is not None:
jpayne@7 896 self.length_remaining -= len(data)
jpayne@7 897 return data
jpayne@7 898
jpayne@7 899 def read(
jpayne@7 900 self,
jpayne@7 901 amt: int | None = None,
jpayne@7 902 decode_content: bool | None = None,
jpayne@7 903 cache_content: bool = False,
jpayne@7 904 ) -> bytes:
jpayne@7 905 """
jpayne@7 906 Similar to :meth:`http.client.HTTPResponse.read`, but with two additional
jpayne@7 907 parameters: ``decode_content`` and ``cache_content``.
jpayne@7 908
jpayne@7 909 :param amt:
jpayne@7 910 How much of the content to read. If specified, caching is skipped
jpayne@7 911 because it doesn't make sense to cache partial content as the full
jpayne@7 912 response.
jpayne@7 913
jpayne@7 914 :param decode_content:
jpayne@7 915 If True, will attempt to decode the body based on the
jpayne@7 916 'content-encoding' header.
jpayne@7 917
jpayne@7 918 :param cache_content:
jpayne@7 919 If True, will save the returned data such that the same result is
jpayne@7 920 returned despite of the state of the underlying file object. This
jpayne@7 921 is useful if you want the ``.data`` property to continue working
jpayne@7 922 after having ``.read()`` the file object. (Overridden if ``amt`` is
jpayne@7 923 set.)
jpayne@7 924 """
jpayne@7 925 self._init_decoder()
jpayne@7 926 if decode_content is None:
jpayne@7 927 decode_content = self.decode_content
jpayne@7 928
jpayne@7 929 if amt is not None:
jpayne@7 930 cache_content = False
jpayne@7 931
jpayne@7 932 if len(self._decoded_buffer) >= amt:
jpayne@7 933 return self._decoded_buffer.get(amt)
jpayne@7 934
jpayne@7 935 data = self._raw_read(amt)
jpayne@7 936
jpayne@7 937 flush_decoder = amt is None or (amt != 0 and not data)
jpayne@7 938
jpayne@7 939 if not data and len(self._decoded_buffer) == 0:
jpayne@7 940 return data
jpayne@7 941
jpayne@7 942 if amt is None:
jpayne@7 943 data = self._decode(data, decode_content, flush_decoder)
jpayne@7 944 if cache_content:
jpayne@7 945 self._body = data
jpayne@7 946 else:
jpayne@7 947 # do not waste memory on buffer when not decoding
jpayne@7 948 if not decode_content:
jpayne@7 949 if self._has_decoded_content:
jpayne@7 950 raise RuntimeError(
jpayne@7 951 "Calling read(decode_content=False) is not supported after "
jpayne@7 952 "read(decode_content=True) was called."
jpayne@7 953 )
jpayne@7 954 return data
jpayne@7 955
jpayne@7 956 decoded_data = self._decode(data, decode_content, flush_decoder)
jpayne@7 957 self._decoded_buffer.put(decoded_data)
jpayne@7 958
jpayne@7 959 while len(self._decoded_buffer) < amt and data:
jpayne@7 960 # TODO make sure to initially read enough data to get past the headers
jpayne@7 961 # For example, the GZ file header takes 10 bytes, we don't want to read
jpayne@7 962 # it one byte at a time
jpayne@7 963 data = self._raw_read(amt)
jpayne@7 964 decoded_data = self._decode(data, decode_content, flush_decoder)
jpayne@7 965 self._decoded_buffer.put(decoded_data)
jpayne@7 966 data = self._decoded_buffer.get(amt)
jpayne@7 967
jpayne@7 968 return data
jpayne@7 969
jpayne@7 970 def read1(
jpayne@7 971 self,
jpayne@7 972 amt: int | None = None,
jpayne@7 973 decode_content: bool | None = None,
jpayne@7 974 ) -> bytes:
jpayne@7 975 """
jpayne@7 976 Similar to ``http.client.HTTPResponse.read1`` and documented
jpayne@7 977 in :meth:`io.BufferedReader.read1`, but with an additional parameter:
jpayne@7 978 ``decode_content``.
jpayne@7 979
jpayne@7 980 :param amt:
jpayne@7 981 How much of the content to read.
jpayne@7 982
jpayne@7 983 :param decode_content:
jpayne@7 984 If True, will attempt to decode the body based on the
jpayne@7 985 'content-encoding' header.
jpayne@7 986 """
jpayne@7 987 if decode_content is None:
jpayne@7 988 decode_content = self.decode_content
jpayne@7 989 # try and respond without going to the network
jpayne@7 990 if self._has_decoded_content:
jpayne@7 991 if not decode_content:
jpayne@7 992 raise RuntimeError(
jpayne@7 993 "Calling read1(decode_content=False) is not supported after "
jpayne@7 994 "read1(decode_content=True) was called."
jpayne@7 995 )
jpayne@7 996 if len(self._decoded_buffer) > 0:
jpayne@7 997 if amt is None:
jpayne@7 998 return self._decoded_buffer.get_all()
jpayne@7 999 return self._decoded_buffer.get(amt)
jpayne@7 1000 if amt == 0:
jpayne@7 1001 return b""
jpayne@7 1002
jpayne@7 1003 # FIXME, this method's type doesn't say returning None is possible
jpayne@7 1004 data = self._raw_read(amt, read1=True)
jpayne@7 1005 if not decode_content or data is None:
jpayne@7 1006 return data
jpayne@7 1007
jpayne@7 1008 self._init_decoder()
jpayne@7 1009 while True:
jpayne@7 1010 flush_decoder = not data
jpayne@7 1011 decoded_data = self._decode(data, decode_content, flush_decoder)
jpayne@7 1012 self._decoded_buffer.put(decoded_data)
jpayne@7 1013 if decoded_data or flush_decoder:
jpayne@7 1014 break
jpayne@7 1015 data = self._raw_read(8192, read1=True)
jpayne@7 1016
jpayne@7 1017 if amt is None:
jpayne@7 1018 return self._decoded_buffer.get_all()
jpayne@7 1019 return self._decoded_buffer.get(amt)
jpayne@7 1020
jpayne@7 1021 def stream(
jpayne@7 1022 self, amt: int | None = 2**16, decode_content: bool | None = None
jpayne@7 1023 ) -> typing.Generator[bytes, None, None]:
jpayne@7 1024 """
jpayne@7 1025 A generator wrapper for the read() method. A call will block until
jpayne@7 1026 ``amt`` bytes have been read from the connection or until the
jpayne@7 1027 connection is closed.
jpayne@7 1028
jpayne@7 1029 :param amt:
jpayne@7 1030 How much of the content to read. The generator will return up to
jpayne@7 1031 much data per iteration, but may return less. This is particularly
jpayne@7 1032 likely when using compressed data. However, the empty string will
jpayne@7 1033 never be returned.
jpayne@7 1034
jpayne@7 1035 :param decode_content:
jpayne@7 1036 If True, will attempt to decode the body based on the
jpayne@7 1037 'content-encoding' header.
jpayne@7 1038 """
jpayne@7 1039 if self.chunked and self.supports_chunked_reads():
jpayne@7 1040 yield from self.read_chunked(amt, decode_content=decode_content)
jpayne@7 1041 else:
jpayne@7 1042 while not is_fp_closed(self._fp) or len(self._decoded_buffer) > 0:
jpayne@7 1043 data = self.read(amt=amt, decode_content=decode_content)
jpayne@7 1044
jpayne@7 1045 if data:
jpayne@7 1046 yield data
jpayne@7 1047
jpayne@7 1048 # Overrides from io.IOBase
jpayne@7 1049 def readable(self) -> bool:
jpayne@7 1050 return True
jpayne@7 1051
jpayne@7 1052 def close(self) -> None:
jpayne@7 1053 if not self.closed and self._fp:
jpayne@7 1054 self._fp.close()
jpayne@7 1055
jpayne@7 1056 if self._connection:
jpayne@7 1057 self._connection.close()
jpayne@7 1058
jpayne@7 1059 if not self.auto_close:
jpayne@7 1060 io.IOBase.close(self)
jpayne@7 1061
jpayne@7 1062 @property
jpayne@7 1063 def closed(self) -> bool:
jpayne@7 1064 if not self.auto_close:
jpayne@7 1065 return io.IOBase.closed.__get__(self) # type: ignore[no-any-return]
jpayne@7 1066 elif self._fp is None:
jpayne@7 1067 return True
jpayne@7 1068 elif hasattr(self._fp, "isclosed"):
jpayne@7 1069 return self._fp.isclosed()
jpayne@7 1070 elif hasattr(self._fp, "closed"):
jpayne@7 1071 return self._fp.closed
jpayne@7 1072 else:
jpayne@7 1073 return True
jpayne@7 1074
jpayne@7 1075 def fileno(self) -> int:
jpayne@7 1076 if self._fp is None:
jpayne@7 1077 raise OSError("HTTPResponse has no file to get a fileno from")
jpayne@7 1078 elif hasattr(self._fp, "fileno"):
jpayne@7 1079 return self._fp.fileno()
jpayne@7 1080 else:
jpayne@7 1081 raise OSError(
jpayne@7 1082 "The file-like object this HTTPResponse is wrapped "
jpayne@7 1083 "around has no file descriptor"
jpayne@7 1084 )
jpayne@7 1085
jpayne@7 1086 def flush(self) -> None:
jpayne@7 1087 if (
jpayne@7 1088 self._fp is not None
jpayne@7 1089 and hasattr(self._fp, "flush")
jpayne@7 1090 and not getattr(self._fp, "closed", False)
jpayne@7 1091 ):
jpayne@7 1092 return self._fp.flush()
jpayne@7 1093
jpayne@7 1094 def supports_chunked_reads(self) -> bool:
jpayne@7 1095 """
jpayne@7 1096 Checks if the underlying file-like object looks like a
jpayne@7 1097 :class:`http.client.HTTPResponse` object. We do this by testing for
jpayne@7 1098 the fp attribute. If it is present we assume it returns raw chunks as
jpayne@7 1099 processed by read_chunked().
jpayne@7 1100 """
jpayne@7 1101 return hasattr(self._fp, "fp")
jpayne@7 1102
jpayne@7 1103 def _update_chunk_length(self) -> None:
jpayne@7 1104 # First, we'll figure out length of a chunk and then
jpayne@7 1105 # we'll try to read it from socket.
jpayne@7 1106 if self.chunk_left is not None:
jpayne@7 1107 return None
jpayne@7 1108 line = self._fp.fp.readline() # type: ignore[union-attr]
jpayne@7 1109 line = line.split(b";", 1)[0]
jpayne@7 1110 try:
jpayne@7 1111 self.chunk_left = int(line, 16)
jpayne@7 1112 except ValueError:
jpayne@7 1113 self.close()
jpayne@7 1114 if line:
jpayne@7 1115 # Invalid chunked protocol response, abort.
jpayne@7 1116 raise InvalidChunkLength(self, line) from None
jpayne@7 1117 else:
jpayne@7 1118 # Truncated at start of next chunk
jpayne@7 1119 raise ProtocolError("Response ended prematurely") from None
jpayne@7 1120
jpayne@7 1121 def _handle_chunk(self, amt: int | None) -> bytes:
jpayne@7 1122 returned_chunk = None
jpayne@7 1123 if amt is None:
jpayne@7 1124 chunk = self._fp._safe_read(self.chunk_left) # type: ignore[union-attr]
jpayne@7 1125 returned_chunk = chunk
jpayne@7 1126 self._fp._safe_read(2) # type: ignore[union-attr] # Toss the CRLF at the end of the chunk.
jpayne@7 1127 self.chunk_left = None
jpayne@7 1128 elif self.chunk_left is not None and amt < self.chunk_left:
jpayne@7 1129 value = self._fp._safe_read(amt) # type: ignore[union-attr]
jpayne@7 1130 self.chunk_left = self.chunk_left - amt
jpayne@7 1131 returned_chunk = value
jpayne@7 1132 elif amt == self.chunk_left:
jpayne@7 1133 value = self._fp._safe_read(amt) # type: ignore[union-attr]
jpayne@7 1134 self._fp._safe_read(2) # type: ignore[union-attr] # Toss the CRLF at the end of the chunk.
jpayne@7 1135 self.chunk_left = None
jpayne@7 1136 returned_chunk = value
jpayne@7 1137 else: # amt > self.chunk_left
jpayne@7 1138 returned_chunk = self._fp._safe_read(self.chunk_left) # type: ignore[union-attr]
jpayne@7 1139 self._fp._safe_read(2) # type: ignore[union-attr] # Toss the CRLF at the end of the chunk.
jpayne@7 1140 self.chunk_left = None
jpayne@7 1141 return returned_chunk # type: ignore[no-any-return]
jpayne@7 1142
jpayne@7 1143 def read_chunked(
jpayne@7 1144 self, amt: int | None = None, decode_content: bool | None = None
jpayne@7 1145 ) -> typing.Generator[bytes, None, None]:
jpayne@7 1146 """
jpayne@7 1147 Similar to :meth:`HTTPResponse.read`, but with an additional
jpayne@7 1148 parameter: ``decode_content``.
jpayne@7 1149
jpayne@7 1150 :param amt:
jpayne@7 1151 How much of the content to read. If specified, caching is skipped
jpayne@7 1152 because it doesn't make sense to cache partial content as the full
jpayne@7 1153 response.
jpayne@7 1154
jpayne@7 1155 :param decode_content:
jpayne@7 1156 If True, will attempt to decode the body based on the
jpayne@7 1157 'content-encoding' header.
jpayne@7 1158 """
jpayne@7 1159 self._init_decoder()
jpayne@7 1160 # FIXME: Rewrite this method and make it a class with a better structured logic.
jpayne@7 1161 if not self.chunked:
jpayne@7 1162 raise ResponseNotChunked(
jpayne@7 1163 "Response is not chunked. "
jpayne@7 1164 "Header 'transfer-encoding: chunked' is missing."
jpayne@7 1165 )
jpayne@7 1166 if not self.supports_chunked_reads():
jpayne@7 1167 raise BodyNotHttplibCompatible(
jpayne@7 1168 "Body should be http.client.HTTPResponse like. "
jpayne@7 1169 "It should have have an fp attribute which returns raw chunks."
jpayne@7 1170 )
jpayne@7 1171
jpayne@7 1172 with self._error_catcher():
jpayne@7 1173 # Don't bother reading the body of a HEAD request.
jpayne@7 1174 if self._original_response and is_response_to_head(self._original_response):
jpayne@7 1175 self._original_response.close()
jpayne@7 1176 return None
jpayne@7 1177
jpayne@7 1178 # If a response is already read and closed
jpayne@7 1179 # then return immediately.
jpayne@7 1180 if self._fp.fp is None: # type: ignore[union-attr]
jpayne@7 1181 return None
jpayne@7 1182
jpayne@7 1183 while True:
jpayne@7 1184 self._update_chunk_length()
jpayne@7 1185 if self.chunk_left == 0:
jpayne@7 1186 break
jpayne@7 1187 chunk = self._handle_chunk(amt)
jpayne@7 1188 decoded = self._decode(
jpayne@7 1189 chunk, decode_content=decode_content, flush_decoder=False
jpayne@7 1190 )
jpayne@7 1191 if decoded:
jpayne@7 1192 yield decoded
jpayne@7 1193
jpayne@7 1194 if decode_content:
jpayne@7 1195 # On CPython and PyPy, we should never need to flush the
jpayne@7 1196 # decoder. However, on Jython we *might* need to, so
jpayne@7 1197 # lets defensively do it anyway.
jpayne@7 1198 decoded = self._flush_decoder()
jpayne@7 1199 if decoded: # Platform-specific: Jython.
jpayne@7 1200 yield decoded
jpayne@7 1201
jpayne@7 1202 # Chunk content ends with \r\n: discard it.
jpayne@7 1203 while self._fp is not None:
jpayne@7 1204 line = self._fp.fp.readline()
jpayne@7 1205 if not line:
jpayne@7 1206 # Some sites may not end with '\r\n'.
jpayne@7 1207 break
jpayne@7 1208 if line == b"\r\n":
jpayne@7 1209 break
jpayne@7 1210
jpayne@7 1211 # We read everything; close the "file".
jpayne@7 1212 if self._original_response:
jpayne@7 1213 self._original_response.close()
jpayne@7 1214
jpayne@7 1215 @property
jpayne@7 1216 def url(self) -> str | None:
jpayne@7 1217 """
jpayne@7 1218 Returns the URL that was the source of this response.
jpayne@7 1219 If the request that generated this response redirected, this method
jpayne@7 1220 will return the final redirect location.
jpayne@7 1221 """
jpayne@7 1222 return self._request_url
jpayne@7 1223
jpayne@7 1224 @url.setter
jpayne@7 1225 def url(self, url: str) -> None:
jpayne@7 1226 self._request_url = url
jpayne@7 1227
jpayne@7 1228 def __iter__(self) -> typing.Iterator[bytes]:
jpayne@7 1229 buffer: list[bytes] = []
jpayne@7 1230 for chunk in self.stream(decode_content=True):
jpayne@7 1231 if b"\n" in chunk:
jpayne@7 1232 chunks = chunk.split(b"\n")
jpayne@7 1233 yield b"".join(buffer) + chunks[0] + b"\n"
jpayne@7 1234 for x in chunks[1:-1]:
jpayne@7 1235 yield x + b"\n"
jpayne@7 1236 if chunks[-1]:
jpayne@7 1237 buffer = [chunks[-1]]
jpayne@7 1238 else:
jpayne@7 1239 buffer = []
jpayne@7 1240 else:
jpayne@7 1241 buffer.append(chunk)
jpayne@7 1242 if buffer:
jpayne@7 1243 yield b"".join(buffer)