annotate CSP2/CSP2_env/env-d9b9114564458d9d-741b3de822f2aaca6c6caa4325c4afce/lib/python3.8/site-packages/urllib3/response.py @ 68:5028fdace37b

planemo upload commit 2e9511a184a1ca667c7be0c6321a36dc4e3d116d
author jpayne
date Tue, 18 Mar 2025 16:23:26 -0400
rev   line source
jpayne@68 1 from __future__ import annotations
jpayne@68 2
jpayne@68 3 import collections
jpayne@68 4 import io
jpayne@68 5 import json as _json
jpayne@68 6 import logging
jpayne@68 7 import re
jpayne@68 8 import sys
jpayne@68 9 import typing
jpayne@68 10 import warnings
jpayne@68 11 import zlib
jpayne@68 12 from contextlib import contextmanager
jpayne@68 13 from http.client import HTTPMessage as _HttplibHTTPMessage
jpayne@68 14 from http.client import HTTPResponse as _HttplibHTTPResponse
jpayne@68 15 from socket import timeout as SocketTimeout
jpayne@68 16
jpayne@68 17 if typing.TYPE_CHECKING:
jpayne@68 18 from ._base_connection import BaseHTTPConnection
jpayne@68 19
jpayne@68 20 try:
jpayne@68 21 try:
jpayne@68 22 import brotlicffi as brotli # type: ignore[import-not-found]
jpayne@68 23 except ImportError:
jpayne@68 24 import brotli # type: ignore[import-not-found]
jpayne@68 25 except ImportError:
jpayne@68 26 brotli = None
jpayne@68 27
jpayne@68 28 try:
jpayne@68 29 import zstandard as zstd
jpayne@68 30 except (AttributeError, ImportError, ValueError): # Defensive:
jpayne@68 31 HAS_ZSTD = False
jpayne@68 32 else:
jpayne@68 33 # The package 'zstandard' added the 'eof' property starting
jpayne@68 34 # in v0.18.0 which we require to ensure a complete and
jpayne@68 35 # valid zstd stream was fed into the ZstdDecoder.
jpayne@68 36 # See: https://github.com/urllib3/urllib3/pull/2624
jpayne@68 37 _zstd_version = tuple(
jpayne@68 38 map(int, re.search(r"^([0-9]+)\.([0-9]+)", zstd.__version__).groups()) # type: ignore[union-attr]
jpayne@68 39 )
jpayne@68 40 if _zstd_version < (0, 18): # Defensive:
jpayne@68 41 HAS_ZSTD = False
jpayne@68 42 else:
jpayne@68 43 HAS_ZSTD = True
jpayne@68 44
jpayne@68 45 from . import util
jpayne@68 46 from ._base_connection import _TYPE_BODY
jpayne@68 47 from ._collections import HTTPHeaderDict
jpayne@68 48 from .connection import BaseSSLError, HTTPConnection, HTTPException
jpayne@68 49 from .exceptions import (
jpayne@68 50 BodyNotHttplibCompatible,
jpayne@68 51 DecodeError,
jpayne@68 52 HTTPError,
jpayne@68 53 IncompleteRead,
jpayne@68 54 InvalidChunkLength,
jpayne@68 55 InvalidHeader,
jpayne@68 56 ProtocolError,
jpayne@68 57 ReadTimeoutError,
jpayne@68 58 ResponseNotChunked,
jpayne@68 59 SSLError,
jpayne@68 60 )
jpayne@68 61 from .util.response import is_fp_closed, is_response_to_head
jpayne@68 62 from .util.retry import Retry
jpayne@68 63
jpayne@68 64 if typing.TYPE_CHECKING:
jpayne@68 65 from .connectionpool import HTTPConnectionPool
jpayne@68 66
jpayne@68 67 log = logging.getLogger(__name__)
jpayne@68 68
jpayne@68 69
jpayne@68 70 class ContentDecoder:
jpayne@68 71 def decompress(self, data: bytes) -> bytes:
jpayne@68 72 raise NotImplementedError()
jpayne@68 73
jpayne@68 74 def flush(self) -> bytes:
jpayne@68 75 raise NotImplementedError()
jpayne@68 76
jpayne@68 77
jpayne@68 78 class DeflateDecoder(ContentDecoder):
jpayne@68 79 def __init__(self) -> None:
jpayne@68 80 self._first_try = True
jpayne@68 81 self._data = b""
jpayne@68 82 self._obj = zlib.decompressobj()
jpayne@68 83
jpayne@68 84 def decompress(self, data: bytes) -> bytes:
jpayne@68 85 if not data:
jpayne@68 86 return data
jpayne@68 87
jpayne@68 88 if not self._first_try:
jpayne@68 89 return self._obj.decompress(data)
jpayne@68 90
jpayne@68 91 self._data += data
jpayne@68 92 try:
jpayne@68 93 decompressed = self._obj.decompress(data)
jpayne@68 94 if decompressed:
jpayne@68 95 self._first_try = False
jpayne@68 96 self._data = None # type: ignore[assignment]
jpayne@68 97 return decompressed
jpayne@68 98 except zlib.error:
jpayne@68 99 self._first_try = False
jpayne@68 100 self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
jpayne@68 101 try:
jpayne@68 102 return self.decompress(self._data)
jpayne@68 103 finally:
jpayne@68 104 self._data = None # type: ignore[assignment]
jpayne@68 105
jpayne@68 106 def flush(self) -> bytes:
jpayne@68 107 return self._obj.flush()
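
# The decoder above works around a long-standing quirk of the "deflate" coding:
# some servers send a raw DEFLATE stream (RFC 1951) instead of the zlib-wrapped
# form (RFC 1950) that the header implies. A rough standalone sketch of the same
# fallback, for illustration only (the helper name is ours, not part of urllib3):
#
#     import zlib
#
#     def _inflate(payload: bytes) -> bytes:
#         try:
#             return zlib.decompress(payload)  # zlib-wrapped deflate (RFC 1950)
#         except zlib.error:
#             # retry as a raw DEFLATE stream (RFC 1951)
#             return zlib.decompress(payload, -zlib.MAX_WBITS)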
jpayne@68 108
jpayne@68 109
jpayne@68 110 class GzipDecoderState:
jpayne@68 111 FIRST_MEMBER = 0
jpayne@68 112 OTHER_MEMBERS = 1
jpayne@68 113 SWALLOW_DATA = 2
jpayne@68 114
jpayne@68 115
jpayne@68 116 class GzipDecoder(ContentDecoder):
jpayne@68 117 def __init__(self) -> None:
jpayne@68 118 self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
jpayne@68 119 self._state = GzipDecoderState.FIRST_MEMBER
jpayne@68 120
jpayne@68 121 def decompress(self, data: bytes) -> bytes:
jpayne@68 122 ret = bytearray()
jpayne@68 123 if self._state == GzipDecoderState.SWALLOW_DATA or not data:
jpayne@68 124 return bytes(ret)
jpayne@68 125 while True:
jpayne@68 126 try:
jpayne@68 127 ret += self._obj.decompress(data)
jpayne@68 128 except zlib.error:
jpayne@68 129 previous_state = self._state
jpayne@68 130 # Ignore data after the first error
jpayne@68 131 self._state = GzipDecoderState.SWALLOW_DATA
jpayne@68 132 if previous_state == GzipDecoderState.OTHER_MEMBERS:
jpayne@68 133 # Allow trailing garbage acceptable in other gzip clients
jpayne@68 134 return bytes(ret)
jpayne@68 135 raise
jpayne@68 136 data = self._obj.unused_data
jpayne@68 137 if not data:
jpayne@68 138 return bytes(ret)
jpayne@68 139 self._state = GzipDecoderState.OTHER_MEMBERS
jpayne@68 140 self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
jpayne@68 141
jpayne@68 142 def flush(self) -> bytes:
jpayne@68 143 return self._obj.flush()
jpayne@68 144
jpayne@68 145
jpayne@68 146 if brotli is not None:
jpayne@68 147
jpayne@68 148 class BrotliDecoder(ContentDecoder):
jpayne@68 149 # Supports both 'brotlipy' and 'Brotli' packages
jpayne@68 150 # since they share an import name. The top branches
jpayne@68 151 # are for 'brotlipy' and bottom branches for 'Brotli'
jpayne@68 152 def __init__(self) -> None:
jpayne@68 153 self._obj = brotli.Decompressor()
jpayne@68 154 if hasattr(self._obj, "decompress"):
jpayne@68 155 setattr(self, "decompress", self._obj.decompress)
jpayne@68 156 else:
jpayne@68 157 setattr(self, "decompress", self._obj.process)
jpayne@68 158
jpayne@68 159 def flush(self) -> bytes:
jpayne@68 160 if hasattr(self._obj, "flush"):
jpayne@68 161 return self._obj.flush() # type: ignore[no-any-return]
jpayne@68 162 return b""
jpayne@68 163
jpayne@68 164
jpayne@68 165 if HAS_ZSTD:
jpayne@68 166
jpayne@68 167 class ZstdDecoder(ContentDecoder):
jpayne@68 168 def __init__(self) -> None:
jpayne@68 169 self._obj = zstd.ZstdDecompressor().decompressobj()
jpayne@68 170
jpayne@68 171 def decompress(self, data: bytes) -> bytes:
jpayne@68 172 if not data:
jpayne@68 173 return b""
jpayne@68 174 data_parts = [self._obj.decompress(data)]
jpayne@68 175 while self._obj.eof and self._obj.unused_data:
jpayne@68 176 unused_data = self._obj.unused_data
jpayne@68 177 self._obj = zstd.ZstdDecompressor().decompressobj()
jpayne@68 178 data_parts.append(self._obj.decompress(unused_data))
jpayne@68 179 return b"".join(data_parts)
jpayne@68 180
jpayne@68 181 def flush(self) -> bytes:
jpayne@68 182 ret = self._obj.flush() # note: this is a no-op
jpayne@68 183 if not self._obj.eof:
jpayne@68 184 raise DecodeError("Zstandard data is incomplete")
jpayne@68 185 return ret
jpayne@68 186
jpayne@68 187
jpayne@68 188 class MultiDecoder(ContentDecoder):
jpayne@68 189 """
jpayne@68 190 From RFC7231:
jpayne@68 191 If one or more encodings have been applied to a representation, the
jpayne@68 192 sender that applied the encodings MUST generate a Content-Encoding
jpayne@68 193 header field that lists the content codings in the order in which
jpayne@68 194 they were applied.
jpayne@68 195 """
jpayne@68 196
jpayne@68 197 def __init__(self, modes: str) -> None:
jpayne@68 198 self._decoders = [_get_decoder(m.strip()) for m in modes.split(",")]
jpayne@68 199
jpayne@68 200 def flush(self) -> bytes:
jpayne@68 201 return self._decoders[0].flush()
jpayne@68 202
jpayne@68 203 def decompress(self, data: bytes) -> bytes:
jpayne@68 204 for d in reversed(self._decoders):
jpayne@68 205 data = d.decompress(data)
jpayne@68 206 return data
jpayne@68 207
jpayne@68 208
jpayne@68 209 def _get_decoder(mode: str) -> ContentDecoder:
jpayne@68 210 if "," in mode:
jpayne@68 211 return MultiDecoder(mode)
jpayne@68 212
jpayne@68 213 # According to RFC 9110 section 8.4.1.3, recipients should
jpayne@68 214 # consider x-gzip equivalent to gzip
jpayne@68 215 if mode in ("gzip", "x-gzip"):
jpayne@68 216 return GzipDecoder()
jpayne@68 217
jpayne@68 218 if brotli is not None and mode == "br":
jpayne@68 219 return BrotliDecoder()
jpayne@68 220
jpayne@68 221 if HAS_ZSTD and mode == "zstd":
jpayne@68 222 return ZstdDecoder()
jpayne@68 223
jpayne@68 224 return DeflateDecoder()
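
# Illustration of how the factory above handles stacked encodings (comment only,
# assuming the optional brotli package is installed): a response sent with
# "Content-Encoding: gzip, br" was gzip-compressed first and brotli-compressed
# second, so decoding must undo the codings in reverse header order, which is
# exactly what MultiDecoder.decompress() does:
#
#     decoder = _get_decoder("gzip, br")      # -> MultiDecoder([gzip, br])
#     body = decoder.decompress(raw_bytes)    # br undone first, then gzip
#     body += decoder.flush()
#
# Here ``raw_bytes`` stands for the still-encoded bytes read off the wire.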
jpayne@68 225
jpayne@68 226
jpayne@68 227 class BytesQueueBuffer:
jpayne@68 228 """Memory-efficient bytes buffer
jpayne@68 229
jpayne@68 230 To return decoded data in read() and still follow the BufferedIOBase API, we need a
jpayne@68 231 buffer to always return the correct amount of bytes.
jpayne@68 232
jpayne@68 233 This buffer should be filled using calls to put()
jpayne@68 234
jpayne@68 235 Our maximum memory usage is determined by the sum of the size of:
jpayne@68 236
jpayne@68 237 * self.buffer, which contains the full data
jpayne@68 238 * the largest chunk that we will copy in get()
jpayne@68 239
jpayne@68 240 The worst case scenario is a single chunk, in which case we'll make a full copy of
jpayne@68 241 the data inside get().
jpayne@68 242 """
jpayne@68 243
jpayne@68 244 def __init__(self) -> None:
jpayne@68 245 self.buffer: typing.Deque[bytes] = collections.deque()
jpayne@68 246 self._size: int = 0
jpayne@68 247
jpayne@68 248 def __len__(self) -> int:
jpayne@68 249 return self._size
jpayne@68 250
jpayne@68 251 def put(self, data: bytes) -> None:
jpayne@68 252 self.buffer.append(data)
jpayne@68 253 self._size += len(data)
jpayne@68 254
jpayne@68 255 def get(self, n: int) -> bytes:
jpayne@68 256 if n == 0:
jpayne@68 257 return b""
jpayne@68 258 elif not self.buffer:
jpayne@68 259 raise RuntimeError("buffer is empty")
jpayne@68 260 elif n < 0:
jpayne@68 261 raise ValueError("n should be > 0")
jpayne@68 262
jpayne@68 263 fetched = 0
jpayne@68 264 ret = io.BytesIO()
jpayne@68 265 while fetched < n:
jpayne@68 266 remaining = n - fetched
jpayne@68 267 chunk = self.buffer.popleft()
jpayne@68 268 chunk_length = len(chunk)
jpayne@68 269 if remaining < chunk_length:
jpayne@68 270 left_chunk, right_chunk = chunk[:remaining], chunk[remaining:]
jpayne@68 271 ret.write(left_chunk)
jpayne@68 272 self.buffer.appendleft(right_chunk)
jpayne@68 273 self._size -= remaining
jpayne@68 274 break
jpayne@68 275 else:
jpayne@68 276 ret.write(chunk)
jpayne@68 277 self._size -= chunk_length
jpayne@68 278 fetched += chunk_length
jpayne@68 279
jpayne@68 280 if not self.buffer:
jpayne@68 281 break
jpayne@68 282
jpayne@68 283 return ret.getvalue()
jpayne@68 284
jpayne@68 285 def get_all(self) -> bytes:
jpayne@68 286 buffer = self.buffer
jpayne@68 287 if not buffer:
jpayne@68 288 assert self._size == 0
jpayne@68 289 return b""
jpayne@68 290 if len(buffer) == 1:
jpayne@68 291 result = buffer.pop()
jpayne@68 292 else:
jpayne@68 293 ret = io.BytesIO()
jpayne@68 294 ret.writelines(buffer.popleft() for _ in range(len(buffer)))
jpayne@68 295 result = ret.getvalue()
jpayne@68 296 self._size = 0
jpayne@68 297 return result
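
# Small usage sketch for the buffer above (illustrative only):
#
#     buf = BytesQueueBuffer()
#     buf.put(b"hello ")
#     buf.put(b"world")
#     buf.get(5)       # -> b"hello"; the leftover b" " is pushed back on the left
#     buf.get_all()    # -> b" world"; drains whatever is left in one call
#     len(buf)         # -> 0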
jpayne@68 298
jpayne@68 299
jpayne@68 300 class BaseHTTPResponse(io.IOBase):
jpayne@68 301 CONTENT_DECODERS = ["gzip", "x-gzip", "deflate"]
jpayne@68 302 if brotli is not None:
jpayne@68 303 CONTENT_DECODERS += ["br"]
jpayne@68 304 if HAS_ZSTD:
jpayne@68 305 CONTENT_DECODERS += ["zstd"]
jpayne@68 306 REDIRECT_STATUSES = [301, 302, 303, 307, 308]
jpayne@68 307
jpayne@68 308 DECODER_ERROR_CLASSES: tuple[type[Exception], ...] = (IOError, zlib.error)
jpayne@68 309 if brotli is not None:
jpayne@68 310 DECODER_ERROR_CLASSES += (brotli.error,)
jpayne@68 311
jpayne@68 312 if HAS_ZSTD:
jpayne@68 313 DECODER_ERROR_CLASSES += (zstd.ZstdError,)
jpayne@68 314
jpayne@68 315 def __init__(
jpayne@68 316 self,
jpayne@68 317 *,
jpayne@68 318 headers: typing.Mapping[str, str] | typing.Mapping[bytes, bytes] | None = None,
jpayne@68 319 status: int,
jpayne@68 320 version: int,
jpayne@68 321 version_string: str,
jpayne@68 322 reason: str | None,
jpayne@68 323 decode_content: bool,
jpayne@68 324 request_url: str | None,
jpayne@68 325 retries: Retry | None = None,
jpayne@68 326 ) -> None:
jpayne@68 327 if isinstance(headers, HTTPHeaderDict):
jpayne@68 328 self.headers = headers
jpayne@68 329 else:
jpayne@68 330 self.headers = HTTPHeaderDict(headers) # type: ignore[arg-type]
jpayne@68 331 self.status = status
jpayne@68 332 self.version = version
jpayne@68 333 self.version_string = version_string
jpayne@68 334 self.reason = reason
jpayne@68 335 self.decode_content = decode_content
jpayne@68 336 self._has_decoded_content = False
jpayne@68 337 self._request_url: str | None = request_url
jpayne@68 338 self.retries = retries
jpayne@68 339
jpayne@68 340 self.chunked = False
jpayne@68 341 tr_enc = self.headers.get("transfer-encoding", "").lower()
jpayne@68 342 # Don't incur the penalty of creating a list and then discarding it
jpayne@68 343 encodings = (enc.strip() for enc in tr_enc.split(","))
jpayne@68 344 if "chunked" in encodings:
jpayne@68 345 self.chunked = True
jpayne@68 346
jpayne@68 347 self._decoder: ContentDecoder | None = None
jpayne@68 348 self.length_remaining: int | None
jpayne@68 349
jpayne@68 350 def get_redirect_location(self) -> str | None | typing.Literal[False]:
jpayne@68 351 """
jpayne@68 352 Should we redirect and where to?
jpayne@68 353
jpayne@68 354 :returns: Truthy redirect location string if we got a redirect status
jpayne@68 355 code and valid location. ``None`` if redirect status and no
jpayne@68 356 location. ``False`` if not a redirect status code.
jpayne@68 357 """
jpayne@68 358 if self.status in self.REDIRECT_STATUSES:
jpayne@68 359 return self.headers.get("location")
jpayne@68 360 return False
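
# For example, per the docstring above:
#     status 301 with "Location: /new"     -> "/new"
#     status 301 without a Location header -> None
#     status 200 (not a redirect)          -> False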
jpayne@68 361
jpayne@68 362 @property
jpayne@68 363 def data(self) -> bytes:
jpayne@68 364 raise NotImplementedError()
jpayne@68 365
jpayne@68 366 def json(self) -> typing.Any:
jpayne@68 367 """
jpayne@68 368 Deserializes the body of the HTTP response as a Python object.
jpayne@68 369
jpayne@68 370 The body of the HTTP response must be encoded using UTF-8, as per
jpayne@68 371 `RFC 8259 Section 8.1 <https://www.rfc-editor.org/rfc/rfc8259#section-8.1>`_.
jpayne@68 372
jpayne@68 373 To use a custom JSON decoder pass the result of :attr:`HTTPResponse.data` to
jpayne@68 374 your custom decoder instead.
jpayne@68 375
jpayne@68 376 If the body of the HTTP response is not decodable to UTF-8, a
jpayne@68 377 `UnicodeDecodeError` will be raised. If the body of the HTTP response is not a
jpayne@68 378 valid JSON document, a `json.JSONDecodeError` will be raised.
jpayne@68 379
jpayne@68 380 Read more :ref:`here <json_content>`.
jpayne@68 381
jpayne@68 382 :returns: The body of the HTTP response as a Python object.
jpayne@68 383 """
jpayne@68 384 data = self.data.decode("utf-8")
jpayne@68 385 return _json.loads(data)
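
# The "custom decoder" route mentioned in the docstring above, as a sketch
# (``my_json_loads`` is a placeholder for whatever decoder the caller prefers,
# and ``resp`` for any BaseHTTPResponse instance):
#
#     text = resp.data.decode("utf-8")
#     obj = my_json_loads(text)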
jpayne@68 386
jpayne@68 387 @property
jpayne@68 388 def url(self) -> str | None:
jpayne@68 389 raise NotImplementedError()
jpayne@68 390
jpayne@68 391 @url.setter
jpayne@68 392 def url(self, url: str | None) -> None:
jpayne@68 393 raise NotImplementedError()
jpayne@68 394
jpayne@68 395 @property
jpayne@68 396 def connection(self) -> BaseHTTPConnection | None:
jpayne@68 397 raise NotImplementedError()
jpayne@68 398
jpayne@68 399 @property
jpayne@68 400 def retries(self) -> Retry | None:
jpayne@68 401 return self._retries
jpayne@68 402
jpayne@68 403 @retries.setter
jpayne@68 404 def retries(self, retries: Retry | None) -> None:
jpayne@68 405 # Override the request_url if retries has a redirect location.
jpayne@68 406 if retries is not None and retries.history:
jpayne@68 407 self.url = retries.history[-1].redirect_location
jpayne@68 408 self._retries = retries
jpayne@68 409
jpayne@68 410 def stream(
jpayne@68 411 self, amt: int | None = 2**16, decode_content: bool | None = None
jpayne@68 412 ) -> typing.Iterator[bytes]:
jpayne@68 413 raise NotImplementedError()
jpayne@68 414
jpayne@68 415 def read(
jpayne@68 416 self,
jpayne@68 417 amt: int | None = None,
jpayne@68 418 decode_content: bool | None = None,
jpayne@68 419 cache_content: bool = False,
jpayne@68 420 ) -> bytes:
jpayne@68 421 raise NotImplementedError()
jpayne@68 422
jpayne@68 423 def read1(
jpayne@68 424 self,
jpayne@68 425 amt: int | None = None,
jpayne@68 426 decode_content: bool | None = None,
jpayne@68 427 ) -> bytes:
jpayne@68 428 raise NotImplementedError()
jpayne@68 429
jpayne@68 430 def read_chunked(
jpayne@68 431 self,
jpayne@68 432 amt: int | None = None,
jpayne@68 433 decode_content: bool | None = None,
jpayne@68 434 ) -> typing.Iterator[bytes]:
jpayne@68 435 raise NotImplementedError()
jpayne@68 436
jpayne@68 437 def release_conn(self) -> None:
jpayne@68 438 raise NotImplementedError()
jpayne@68 439
jpayne@68 440 def drain_conn(self) -> None:
jpayne@68 441 raise NotImplementedError()
jpayne@68 442
jpayne@68 443 def close(self) -> None:
jpayne@68 444 raise NotImplementedError()
jpayne@68 445
jpayne@68 446 def _init_decoder(self) -> None:
jpayne@68 447 """
jpayne@68 448 Set up the _decoder attribute if necessary.
jpayne@68 449 """
jpayne@68 450 # Note: content-encoding value should be case-insensitive, per RFC 7230
jpayne@68 451 # Section 3.2
jpayne@68 452 content_encoding = self.headers.get("content-encoding", "").lower()
jpayne@68 453 if self._decoder is None:
jpayne@68 454 if content_encoding in self.CONTENT_DECODERS:
jpayne@68 455 self._decoder = _get_decoder(content_encoding)
jpayne@68 456 elif "," in content_encoding:
jpayne@68 457 encodings = [
jpayne@68 458 e.strip()
jpayne@68 459 for e in content_encoding.split(",")
jpayne@68 460 if e.strip() in self.CONTENT_DECODERS
jpayne@68 461 ]
jpayne@68 462 if encodings:
jpayne@68 463 self._decoder = _get_decoder(content_encoding)
jpayne@68 464
jpayne@68 465 def _decode(
jpayne@68 466 self, data: bytes, decode_content: bool | None, flush_decoder: bool
jpayne@68 467 ) -> bytes:
jpayne@68 468 """
jpayne@68 469 Decode the data passed in and potentially flush the decoder.
jpayne@68 470 """
jpayne@68 471 if not decode_content:
jpayne@68 472 if self._has_decoded_content:
jpayne@68 473 raise RuntimeError(
jpayne@68 474 "Calling read(decode_content=False) is not supported after "
jpayne@68 475 "read(decode_content=True) was called."
jpayne@68 476 )
jpayne@68 477 return data
jpayne@68 478
jpayne@68 479 try:
jpayne@68 480 if self._decoder:
jpayne@68 481 data = self._decoder.decompress(data)
jpayne@68 482 self._has_decoded_content = True
jpayne@68 483 except self.DECODER_ERROR_CLASSES as e:
jpayne@68 484 content_encoding = self.headers.get("content-encoding", "").lower()
jpayne@68 485 raise DecodeError(
jpayne@68 486 "Received response with content-encoding: %s, but "
jpayne@68 487 "failed to decode it." % content_encoding,
jpayne@68 488 e,
jpayne@68 489 ) from e
jpayne@68 490 if flush_decoder:
jpayne@68 491 data += self._flush_decoder()
jpayne@68 492
jpayne@68 493 return data
jpayne@68 494
jpayne@68 495 def _flush_decoder(self) -> bytes:
jpayne@68 496 """
jpayne@68 497 Flushes the decoder. Should only be called if the decoder is actually
jpayne@68 498 being used.
jpayne@68 499 """
jpayne@68 500 if self._decoder:
jpayne@68 501 return self._decoder.decompress(b"") + self._decoder.flush()
jpayne@68 502 return b""
jpayne@68 503
jpayne@68 504 # Compatibility methods for `io` module
jpayne@68 505 def readinto(self, b: bytearray) -> int:
jpayne@68 506 temp = self.read(len(b))
jpayne@68 507 if len(temp) == 0:
jpayne@68 508 return 0
jpayne@68 509 else:
jpayne@68 510 b[: len(temp)] = temp
jpayne@68 511 return len(temp)
jpayne@68 512
jpayne@68 513 # Compatibility methods for http.client.HTTPResponse
jpayne@68 514 def getheaders(self) -> HTTPHeaderDict:
jpayne@68 515 warnings.warn(
jpayne@68 516 "HTTPResponse.getheaders() is deprecated and will be removed "
jpayne@68 517 "in urllib3 v2.1.0. Instead access HTTPResponse.headers directly.",
jpayne@68 518 category=DeprecationWarning,
jpayne@68 519 stacklevel=2,
jpayne@68 520 )
jpayne@68 521 return self.headers
jpayne@68 522
jpayne@68 523 def getheader(self, name: str, default: str | None = None) -> str | None:
jpayne@68 524 warnings.warn(
jpayne@68 525 "HTTPResponse.getheader() is deprecated and will be removed "
jpayne@68 526 "in urllib3 v2.1.0. Instead use HTTPResponse.headers.get(name, default).",
jpayne@68 527 category=DeprecationWarning,
jpayne@68 528 stacklevel=2,
jpayne@68 529 )
jpayne@68 530 return self.headers.get(name, default)
jpayne@68 531
jpayne@68 532 # Compatibility method for http.cookiejar
jpayne@68 533 def info(self) -> HTTPHeaderDict:
jpayne@68 534 return self.headers
jpayne@68 535
jpayne@68 536 def geturl(self) -> str | None:
jpayne@68 537 return self.url
jpayne@68 538
jpayne@68 539
jpayne@68 540 class HTTPResponse(BaseHTTPResponse):
jpayne@68 541 """
jpayne@68 542 HTTP Response container.
jpayne@68 543
jpayne@68 544 Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is
jpayne@68 545 loaded and decoded on-demand when the ``data`` property is accessed. This
jpayne@68 546 class is also compatible with the Python standard library's :mod:`io`
jpayne@68 547 module, and can hence be treated as a readable object in the context of that
jpayne@68 548 framework.
jpayne@68 549
jpayne@68 550 Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`:
jpayne@68 551
jpayne@68 552 :param preload_content:
jpayne@68 553 If True, the response's body will be preloaded during construction.
jpayne@68 554
jpayne@68 555 :param decode_content:
jpayne@68 556 If True, will attempt to decode the body based on the
jpayne@68 557 'content-encoding' header.
jpayne@68 558
jpayne@68 559 :param original_response:
jpayne@68 560 When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse`
jpayne@68 561 object, it's convenient to include the original for debug purposes. It's
jpayne@68 562 otherwise unused.
jpayne@68 563
jpayne@68 564 :param retries:
jpayne@68 565 The last :class:`~urllib3.util.retry.Retry` that
jpayne@68 566 was used during the request.
jpayne@68 567
jpayne@68 568 :param enforce_content_length:
jpayne@68 569 Enforce content length checking. The body returned by the server must
jpayne@68 570 match the value of the Content-Length header, if present; otherwise an error is raised.
jpayne@68 571 """
jpayne@68 572
jpayne@68 573 def __init__(
jpayne@68 574 self,
jpayne@68 575 body: _TYPE_BODY = "",
jpayne@68 576 headers: typing.Mapping[str, str] | typing.Mapping[bytes, bytes] | None = None,
jpayne@68 577 status: int = 0,
jpayne@68 578 version: int = 0,
jpayne@68 579 version_string: str = "HTTP/?",
jpayne@68 580 reason: str | None = None,
jpayne@68 581 preload_content: bool = True,
jpayne@68 582 decode_content: bool = True,
jpayne@68 583 original_response: _HttplibHTTPResponse | None = None,
jpayne@68 584 pool: HTTPConnectionPool | None = None,
jpayne@68 585 connection: HTTPConnection | None = None,
jpayne@68 586 msg: _HttplibHTTPMessage | None = None,
jpayne@68 587 retries: Retry | None = None,
jpayne@68 588 enforce_content_length: bool = True,
jpayne@68 589 request_method: str | None = None,
jpayne@68 590 request_url: str | None = None,
jpayne@68 591 auto_close: bool = True,
jpayne@68 592 ) -> None:
jpayne@68 593 super().__init__(
jpayne@68 594 headers=headers,
jpayne@68 595 status=status,
jpayne@68 596 version=version,
jpayne@68 597 version_string=version_string,
jpayne@68 598 reason=reason,
jpayne@68 599 decode_content=decode_content,
jpayne@68 600 request_url=request_url,
jpayne@68 601 retries=retries,
jpayne@68 602 )
jpayne@68 603
jpayne@68 604 self.enforce_content_length = enforce_content_length
jpayne@68 605 self.auto_close = auto_close
jpayne@68 606
jpayne@68 607 self._body = None
jpayne@68 608 self._fp: _HttplibHTTPResponse | None = None
jpayne@68 609 self._original_response = original_response
jpayne@68 610 self._fp_bytes_read = 0
jpayne@68 611 self.msg = msg
jpayne@68 612
jpayne@68 613 if body and isinstance(body, (str, bytes)):
jpayne@68 614 self._body = body
jpayne@68 615
jpayne@68 616 self._pool = pool
jpayne@68 617 self._connection = connection
jpayne@68 618
jpayne@68 619 if hasattr(body, "read"):
jpayne@68 620 self._fp = body # type: ignore[assignment]
jpayne@68 621
jpayne@68 622 # Are we using the chunked-style of transfer encoding?
jpayne@68 623 self.chunk_left: int | None = None
jpayne@68 624
jpayne@68 625 # Determine length of response
jpayne@68 626 self.length_remaining = self._init_length(request_method)
jpayne@68 627
jpayne@68 628 # Used to return the correct amount of bytes for partial read()s
jpayne@68 629 self._decoded_buffer = BytesQueueBuffer()
jpayne@68 630
jpayne@68 631 # If requested, preload the body.
jpayne@68 632 if preload_content and not self._body:
jpayne@68 633 self._body = self.read(decode_content=decode_content)
jpayne@68 634
jpayne@68 635 def release_conn(self) -> None:
jpayne@68 636 if not self._pool or not self._connection:
jpayne@68 637 return None
jpayne@68 638
jpayne@68 639 self._pool._put_conn(self._connection)
jpayne@68 640 self._connection = None
jpayne@68 641
jpayne@68 642 def drain_conn(self) -> None:
jpayne@68 643 """
jpayne@68 644 Read and discard any remaining HTTP response data in the response connection.
jpayne@68 645
jpayne@68 646 Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
jpayne@68 647 """
jpayne@68 648 try:
jpayne@68 649 self.read()
jpayne@68 650 except (HTTPError, OSError, BaseSSLError, HTTPException):
jpayne@68 651 pass
jpayne@68 652
jpayne@68 653 @property
jpayne@68 654 def data(self) -> bytes:
jpayne@68 655 # For backwards-compat with urllib3 0.4 and earlier.
jpayne@68 656 if self._body:
jpayne@68 657 return self._body # type: ignore[return-value]
jpayne@68 658
jpayne@68 659 if self._fp:
jpayne@68 660 return self.read(cache_content=True)
jpayne@68 661
jpayne@68 662 return None # type: ignore[return-value]
jpayne@68 663
jpayne@68 664 @property
jpayne@68 665 def connection(self) -> HTTPConnection | None:
jpayne@68 666 return self._connection
jpayne@68 667
jpayne@68 668 def isclosed(self) -> bool:
jpayne@68 669 return is_fp_closed(self._fp)
jpayne@68 670
jpayne@68 671 def tell(self) -> int:
jpayne@68 672 """
jpayne@68 673 Obtain the number of bytes pulled over the wire so far. May differ from
jpayne@68 674 the amount of content returned by :meth:`urllib3.response.HTTPResponse.read`
jpayne@68 675 if bytes are encoded on the wire (e.g., compressed).
jpayne@68 676 """
jpayne@68 677 return self._fp_bytes_read
jpayne@68 678
jpayne@68 679 def _init_length(self, request_method: str | None) -> int | None:
jpayne@68 680 """
jpayne@68 681 Set initial length value for Response content if available.
jpayne@68 682 """
jpayne@68 683 length: int | None
jpayne@68 684 content_length: str | None = self.headers.get("content-length")
jpayne@68 685
jpayne@68 686 if content_length is not None:
jpayne@68 687 if self.chunked:
jpayne@68 688 # This Response will fail with an IncompleteRead if it can't be
jpayne@68 689 # received as chunked. This method falls back to attempt reading
jpayne@68 690 # the response before raising an exception.
jpayne@68 691 log.warning(
jpayne@68 692 "Received response with both Content-Length and "
jpayne@68 693 "Transfer-Encoding set. This is expressly forbidden "
jpayne@68 694 "by RFC 7230 sec 3.3.2. Ignoring Content-Length and "
jpayne@68 695 "attempting to process response as Transfer-Encoding: "
jpayne@68 696 "chunked."
jpayne@68 697 )
jpayne@68 698 return None
jpayne@68 699
jpayne@68 700 try:
jpayne@68 701 # RFC 7230 section 3.3.2 specifies multiple content lengths can
jpayne@68 702 # be sent in a single Content-Length header
jpayne@68 703 # (e.g. Content-Length: 42, 42). This line ensures the values
jpayne@68 704 # are all valid ints and that as long as the `set` length is 1,
jpayne@68 705 # all values are the same. Otherwise, the header is invalid.
jpayne@68 706 lengths = {int(val) for val in content_length.split(",")}
jpayne@68 707 if len(lengths) > 1:
jpayne@68 708 raise InvalidHeader(
jpayne@68 709 "Content-Length contained multiple "
jpayne@68 710 "unmatching values (%s)" % content_length
jpayne@68 711 )
jpayne@68 712 length = lengths.pop()
jpayne@68 713 except ValueError:
jpayne@68 714 length = None
jpayne@68 715 else:
jpayne@68 716 if length < 0:
jpayne@68 717 length = None
jpayne@68 718
jpayne@68 719 else: # if content_length is None
jpayne@68 720 length = None
jpayne@68 721
jpayne@68 722 # Convert status to int for comparison
jpayne@68 723 # In some cases, httplib returns a status of "_UNKNOWN"
jpayne@68 724 try:
jpayne@68 725 status = int(self.status)
jpayne@68 726 except ValueError:
jpayne@68 727 status = 0
jpayne@68 728
jpayne@68 729 # Check for responses that shouldn't include a body
jpayne@68 730 if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD":
jpayne@68 731 length = 0
jpayne@68 732
jpayne@68 733 return length
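
# Examples of the Content-Length handling above (comment only):
#     "Content-Length: 42"       -> length 42
#     "Content-Length: 42, 42"   -> length 42 (identical duplicates are tolerated)
#     "Content-Length: 42, 13"   -> InvalidHeader is raised
#     "Content-Length: -1"/junk  -> length None (treated as unknown)
# and regardless of the header, 204/304/1xx responses and HEAD requests get 0.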
jpayne@68 734
jpayne@68 735 @contextmanager
jpayne@68 736 def _error_catcher(self) -> typing.Generator[None, None, None]:
jpayne@68 737 """
jpayne@68 738 Catch low-level python exceptions, instead re-raising urllib3
jpayne@68 739 variants, so that low-level exceptions are not leaked in the
jpayne@68 740 high-level api.
jpayne@68 741
jpayne@68 742 On exit, release the connection back to the pool.
jpayne@68 743 """
jpayne@68 744 clean_exit = False
jpayne@68 745
jpayne@68 746 try:
jpayne@68 747 try:
jpayne@68 748 yield
jpayne@68 749
jpayne@68 750 except SocketTimeout as e:
jpayne@68 751 # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
jpayne@68 752 # there is yet no clean way to get at it from this context.
jpayne@68 753 raise ReadTimeoutError(self._pool, None, "Read timed out.") from e # type: ignore[arg-type]
jpayne@68 754
jpayne@68 755 except BaseSSLError as e:
jpayne@68 756 # FIXME: Is there a better way to differentiate between SSLErrors?
jpayne@68 757 if "read operation timed out" not in str(e):
jpayne@68 758 # SSL errors related to framing/MAC get wrapped and reraised here
jpayne@68 759 raise SSLError(e) from e
jpayne@68 760
jpayne@68 761 raise ReadTimeoutError(self._pool, None, "Read timed out.") from e # type: ignore[arg-type]
jpayne@68 762
jpayne@68 763 except IncompleteRead as e:
jpayne@68 764 if (
jpayne@68 765 e.expected is not None
jpayne@68 766 and e.partial is not None
jpayne@68 767 and e.expected == -e.partial
jpayne@68 768 ):
jpayne@68 769 arg = "Response may not contain content."
jpayne@68 770 else:
jpayne@68 771 arg = f"Connection broken: {e!r}"
jpayne@68 772 raise ProtocolError(arg, e) from e
jpayne@68 773
jpayne@68 774 except (HTTPException, OSError) as e:
jpayne@68 775 raise ProtocolError(f"Connection broken: {e!r}", e) from e
jpayne@68 776
jpayne@68 777 # If no exception is thrown, we should avoid cleaning up
jpayne@68 778 # unnecessarily.
jpayne@68 779 clean_exit = True
jpayne@68 780 finally:
jpayne@68 781 # If we didn't terminate cleanly, we need to throw away our
jpayne@68 782 # connection.
jpayne@68 783 if not clean_exit:
jpayne@68 784 # The response may not be closed but we're not going to use it
jpayne@68 785 # anymore so close it now to ensure that the connection is
jpayne@68 786 # released back to the pool.
jpayne@68 787 if self._original_response:
jpayne@68 788 self._original_response.close()
jpayne@68 789
jpayne@68 790 # Closing the response may not actually be sufficient to close
jpayne@68 791 # everything, so if we have a hold of the connection close that
jpayne@68 792 # too.
jpayne@68 793 if self._connection:
jpayne@68 794 self._connection.close()
jpayne@68 795
jpayne@68 796 # If we hold the original response but it's closed now, we should
jpayne@68 797 # return the connection back to the pool.
jpayne@68 798 if self._original_response and self._original_response.isclosed():
jpayne@68 799 self.release_conn()
jpayne@68 800
jpayne@68 801 def _fp_read(
jpayne@68 802 self,
jpayne@68 803 amt: int | None = None,
jpayne@68 804 *,
jpayne@68 805 read1: bool = False,
jpayne@68 806 ) -> bytes:
jpayne@68 807 """
jpayne@68 808 Read a response while working around a known overflow error:
jpayne@68 809 reading more bytes than fit in a 32-bit int at a time via SSL can,
jpayne@68 810 in some known cases, raise an OverflowError. The read is therefore
jpayne@68 811 chunked whenever `amt` or `self.length_remaining` indicate that the
jpayne@68 812 problem may happen.
jpayne@68 813
jpayne@68 814 The known cases:
jpayne@68 815 * 3.8 <= CPython < 3.9.7 because of a bug
jpayne@68 816 https://github.com/urllib3/urllib3/issues/2513#issuecomment-1152559900.
jpayne@68 817 * urllib3 injected with pyOpenSSL-backed SSL-support.
jpayne@68 818 * CPython < 3.10 only when `amt` does not fit 32-bit int.
jpayne@68 819 """
jpayne@68 820 assert self._fp
jpayne@68 821 c_int_max = 2**31 - 1
jpayne@68 822 if (
jpayne@68 823 (amt and amt > c_int_max)
jpayne@68 824 or (
jpayne@68 825 amt is None
jpayne@68 826 and self.length_remaining
jpayne@68 827 and self.length_remaining > c_int_max
jpayne@68 828 )
jpayne@68 829 ) and (util.IS_PYOPENSSL or sys.version_info < (3, 10)):
jpayne@68 830 if read1:
jpayne@68 831 return self._fp.read1(c_int_max)
jpayne@68 832 buffer = io.BytesIO()
jpayne@68 833 # Besides `max_chunk_amt` being a maximum chunk size, it
jpayne@68 834 # affects memory overhead of reading a response by this
jpayne@68 835 # method in CPython.
jpayne@68 836 # `c_int_max` equal to 2 GiB - 1 byte is the actual maximum
jpayne@68 837 # chunk size that does not lead to an overflow error, but
jpayne@68 838 # 256 MiB is a compromise.
jpayne@68 839 max_chunk_amt = 2**28
jpayne@68 840 while amt is None or amt != 0:
jpayne@68 841 if amt is not None:
jpayne@68 842 chunk_amt = min(amt, max_chunk_amt)
jpayne@68 843 amt -= chunk_amt
jpayne@68 844 else:
jpayne@68 845 chunk_amt = max_chunk_amt
jpayne@68 846 data = self._fp.read(chunk_amt)
jpayne@68 847 if not data:
jpayne@68 848 break
jpayne@68 849 buffer.write(data)
jpayne@68 850 del data # to reduce peak memory usage by `max_chunk_amt`.
jpayne@68 851 return buffer.getvalue()
jpayne@68 852 elif read1:
jpayne@68 853 return self._fp.read1(amt) if amt is not None else self._fp.read1()
jpayne@68 854 else:
jpayne@68 855 # StringIO doesn't like amt=None
jpayne@68 856 return self._fp.read(amt) if amt is not None else self._fp.read()
jpayne@68 857
jpayne@68 858 def _raw_read(
jpayne@68 859 self,
jpayne@68 860 amt: int | None = None,
jpayne@68 861 *,
jpayne@68 862 read1: bool = False,
jpayne@68 863 ) -> bytes:
jpayne@68 864 """
jpayne@68 865 Reads `amt` bytes from the socket.
jpayne@68 866 """
jpayne@68 867 if self._fp is None:
jpayne@68 868 return None # type: ignore[return-value]
jpayne@68 869
jpayne@68 870 fp_closed = getattr(self._fp, "closed", False)
jpayne@68 871
jpayne@68 872 with self._error_catcher():
jpayne@68 873 data = self._fp_read(amt, read1=read1) if not fp_closed else b""
jpayne@68 874 if amt is not None and amt != 0 and not data:
jpayne@68 875 # Platform-specific: Buggy versions of Python.
jpayne@68 876 # Close the connection when no data is returned
jpayne@68 877 #
jpayne@68 878 # This is redundant to what httplib/http.client _should_
jpayne@68 879 # already do. However, versions of python released before
jpayne@68 880 # December 15, 2012 (http://bugs.python.org/issue16298) do
jpayne@68 881 # not properly close the connection in all cases. There is
jpayne@68 882 # no harm in redundantly calling close.
jpayne@68 883 self._fp.close()
jpayne@68 884 if (
jpayne@68 885 self.enforce_content_length
jpayne@68 886 and self.length_remaining is not None
jpayne@68 887 and self.length_remaining != 0
jpayne@68 888 ):
jpayne@68 889 # This is an edge case that httplib failed to cover due
jpayne@68 890 # to concerns of backward compatibility. We're
jpayne@68 891 # addressing it here to make sure IncompleteRead is
jpayne@68 892 # raised during streaming, so all calls with incorrect
jpayne@68 893 # Content-Length are caught.
jpayne@68 894 raise IncompleteRead(self._fp_bytes_read, self.length_remaining)
jpayne@68 895 elif read1 and (
jpayne@68 896 (amt != 0 and not data) or self.length_remaining == len(data)
jpayne@68 897 ):
jpayne@68 898 # All data has been read, but `self._fp.read1` in
jpayne@68 899 # CPython 3.12 and older doesn't always close
jpayne@68 900 # `http.client.HTTPResponse`, so we close it here.
jpayne@68 901 # See https://github.com/python/cpython/issues/113199
jpayne@68 902 self._fp.close()
jpayne@68 903
jpayne@68 904 if data:
jpayne@68 905 self._fp_bytes_read += len(data)
jpayne@68 906 if self.length_remaining is not None:
jpayne@68 907 self.length_remaining -= len(data)
jpayne@68 908 return data
jpayne@68 909
jpayne@68 910 def read(
jpayne@68 911 self,
jpayne@68 912 amt: int | None = None,
jpayne@68 913 decode_content: bool | None = None,
jpayne@68 914 cache_content: bool = False,
jpayne@68 915 ) -> bytes:
jpayne@68 916 """
jpayne@68 917 Similar to :meth:`http.client.HTTPResponse.read`, but with two additional
jpayne@68 918 parameters: ``decode_content`` and ``cache_content``.
jpayne@68 919
jpayne@68 920 :param amt:
jpayne@68 921 How much of the content to read. If specified, caching is skipped
jpayne@68 922 because it doesn't make sense to cache partial content as the full
jpayne@68 923 response.
jpayne@68 924
jpayne@68 925 :param decode_content:
jpayne@68 926 If True, will attempt to decode the body based on the
jpayne@68 927 'content-encoding' header.
jpayne@68 928
jpayne@68 929 :param cache_content:
jpayne@68 930 If True, will save the returned data such that the same result is
jpayne@68 931 returned regardless of the state of the underlying file object. This
jpayne@68 932 is useful if you want the ``.data`` property to continue working
jpayne@68 933 after having ``.read()`` the file object. (Overridden if ``amt`` is
jpayne@68 934 set.)
jpayne@68 935 """
jpayne@68 936 self._init_decoder()
jpayne@68 937 if decode_content is None:
jpayne@68 938 decode_content = self.decode_content
jpayne@68 939
jpayne@68 940 if amt and amt < 0:
jpayne@68 941 # Negative numbers and `None` should be treated the same.
jpayne@68 942 amt = None
jpayne@68 943 elif amt is not None:
jpayne@68 944 cache_content = False
jpayne@68 945
jpayne@68 946 if len(self._decoded_buffer) >= amt:
jpayne@68 947 return self._decoded_buffer.get(amt)
jpayne@68 948
jpayne@68 949 data = self._raw_read(amt)
jpayne@68 950
jpayne@68 951 flush_decoder = amt is None or (amt != 0 and not data)
jpayne@68 952
jpayne@68 953 if not data and len(self._decoded_buffer) == 0:
jpayne@68 954 return data
jpayne@68 955
jpayne@68 956 if amt is None:
jpayne@68 957 data = self._decode(data, decode_content, flush_decoder)
jpayne@68 958 if cache_content:
jpayne@68 959 self._body = data
jpayne@68 960 else:
jpayne@68 961 # do not waste memory on buffer when not decoding
jpayne@68 962 if not decode_content:
jpayne@68 963 if self._has_decoded_content:
jpayne@68 964 raise RuntimeError(
jpayne@68 965 "Calling read(decode_content=False) is not supported after "
jpayne@68 966 "read(decode_content=True) was called."
jpayne@68 967 )
jpayne@68 968 return data
jpayne@68 969
jpayne@68 970 decoded_data = self._decode(data, decode_content, flush_decoder)
jpayne@68 971 self._decoded_buffer.put(decoded_data)
jpayne@68 972
jpayne@68 973 while len(self._decoded_buffer) < amt and data:
jpayne@68 974 # TODO make sure to initially read enough data to get past the headers
jpayne@68 975 # For example, the GZ file header takes 10 bytes, we don't want to read
jpayne@68 976 # it one byte at a time
jpayne@68 977 data = self._raw_read(amt)
jpayne@68 978 decoded_data = self._decode(data, decode_content, flush_decoder)
jpayne@68 979 self._decoded_buffer.put(decoded_data)
jpayne@68 980 data = self._decoded_buffer.get(amt)
jpayne@68 981
jpayne@68 982 return data
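
# Typical pattern for consuming a response incrementally with the method above
# (``resp`` stands for an HTTPResponse created with preload_content=False and
# ``handle`` for whatever processes each chunk; illustrative only):
#
#     while True:
#         chunk = resp.read(8192, decode_content=True)
#         if not chunk:
#             break
#         handle(chunk)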
jpayne@68 983
jpayne@68 984 def read1(
jpayne@68 985 self,
jpayne@68 986 amt: int | None = None,
jpayne@68 987 decode_content: bool | None = None,
jpayne@68 988 ) -> bytes:
jpayne@68 989 """
jpayne@68 990 Similar to ``http.client.HTTPResponse.read1`` and documented
jpayne@68 991 in :meth:`io.BufferedReader.read1`, but with an additional parameter:
jpayne@68 992 ``decode_content``.
jpayne@68 993
jpayne@68 994 :param amt:
jpayne@68 995 How much of the content to read.
jpayne@68 996
jpayne@68 997 :param decode_content:
jpayne@68 998 If True, will attempt to decode the body based on the
jpayne@68 999 'content-encoding' header.
jpayne@68 1000 """
jpayne@68 1001 if decode_content is None:
jpayne@68 1002 decode_content = self.decode_content
jpayne@68 1003 if amt and amt < 0:
jpayne@68 1004 # Negative numbers and `None` should be treated the same.
jpayne@68 1005 amt = None
jpayne@68 1006 # try and respond without going to the network
jpayne@68 1007 if self._has_decoded_content:
jpayne@68 1008 if not decode_content:
jpayne@68 1009 raise RuntimeError(
jpayne@68 1010 "Calling read1(decode_content=False) is not supported after "
jpayne@68 1011 "read1(decode_content=True) was called."
jpayne@68 1012 )
jpayne@68 1013 if len(self._decoded_buffer) > 0:
jpayne@68 1014 if amt is None:
jpayne@68 1015 return self._decoded_buffer.get_all()
jpayne@68 1016 return self._decoded_buffer.get(amt)
jpayne@68 1017 if amt == 0:
jpayne@68 1018 return b""
jpayne@68 1019
jpayne@68 1020 # FIXME: this method's type annotation doesn't say returning None is possible
jpayne@68 1021 data = self._raw_read(amt, read1=True)
jpayne@68 1022 if not decode_content or data is None:
jpayne@68 1023 return data
jpayne@68 1024
jpayne@68 1025 self._init_decoder()
jpayne@68 1026 while True:
jpayne@68 1027 flush_decoder = not data
jpayne@68 1028 decoded_data = self._decode(data, decode_content, flush_decoder)
jpayne@68 1029 self._decoded_buffer.put(decoded_data)
jpayne@68 1030 if decoded_data or flush_decoder:
jpayne@68 1031 break
jpayne@68 1032 data = self._raw_read(8192, read1=True)
jpayne@68 1033
jpayne@68 1034 if amt is None:
jpayne@68 1035 return self._decoded_buffer.get_all()
jpayne@68 1036 return self._decoded_buffer.get(amt)
jpayne@68 1037
jpayne@68 1038 def stream(
jpayne@68 1039 self, amt: int | None = 2**16, decode_content: bool | None = None
jpayne@68 1040 ) -> typing.Generator[bytes, None, None]:
jpayne@68 1041 """
jpayne@68 1042 A generator wrapper for the read() method. A call will block until
jpayne@68 1043 ``amt`` bytes have been read from the connection or until the
jpayne@68 1044 connection is closed.
jpayne@68 1045
jpayne@68 1046 :param amt:
jpayne@68 1047 How much of the content to read. The generator will return up to
jpayne@68 1048 ``amt`` bytes of data per iteration, but may return less. This is particularly
jpayne@68 1049 likely when using compressed data. However, the empty string will
jpayne@68 1050 never be returned.
jpayne@68 1051
jpayne@68 1052 :param decode_content:
jpayne@68 1053 If True, will attempt to decode the body based on the
jpayne@68 1054 'content-encoding' header.
jpayne@68 1055 """
jpayne@68 1056 if self.chunked and self.supports_chunked_reads():
jpayne@68 1057 yield from self.read_chunked(amt, decode_content=decode_content)
jpayne@68 1058 else:
jpayne@68 1059 while not is_fp_closed(self._fp) or len(self._decoded_buffer) > 0:
jpayne@68 1060 data = self.read(amt=amt, decode_content=decode_content)
jpayne@68 1061
jpayne@68 1062 if data:
jpayne@68 1063 yield data
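
# A streaming sketch built on the generator above; ``pool`` is assumed to be an
# HTTPConnectionPool and ``sink`` any writable binary file object:
#
#     resp = pool.urlopen("GET", "/large-file", preload_content=False)
#     try:
#         for chunk in resp.stream(2**16, decode_content=True):
#             sink.write(chunk)
#     finally:
#         resp.release_conn()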
jpayne@68 1064
jpayne@68 1065 # Overrides from io.IOBase
jpayne@68 1066 def readable(self) -> bool:
jpayne@68 1067 return True
jpayne@68 1068
jpayne@68 1069 def close(self) -> None:
jpayne@68 1070 if not self.closed and self._fp:
jpayne@68 1071 self._fp.close()
jpayne@68 1072
jpayne@68 1073 if self._connection:
jpayne@68 1074 self._connection.close()
jpayne@68 1075
jpayne@68 1076 if not self.auto_close:
jpayne@68 1077 io.IOBase.close(self)
jpayne@68 1078
jpayne@68 1079 @property
jpayne@68 1080 def closed(self) -> bool:
jpayne@68 1081 if not self.auto_close:
jpayne@68 1082 return io.IOBase.closed.__get__(self) # type: ignore[no-any-return]
jpayne@68 1083 elif self._fp is None:
jpayne@68 1084 return True
jpayne@68 1085 elif hasattr(self._fp, "isclosed"):
jpayne@68 1086 return self._fp.isclosed()
jpayne@68 1087 elif hasattr(self._fp, "closed"):
jpayne@68 1088 return self._fp.closed
jpayne@68 1089 else:
jpayne@68 1090 return True
jpayne@68 1091
jpayne@68 1092 def fileno(self) -> int:
jpayne@68 1093 if self._fp is None:
jpayne@68 1094 raise OSError("HTTPResponse has no file to get a fileno from")
jpayne@68 1095 elif hasattr(self._fp, "fileno"):
jpayne@68 1096 return self._fp.fileno()
jpayne@68 1097 else:
jpayne@68 1098 raise OSError(
jpayne@68 1099 "The file-like object this HTTPResponse is wrapped "
jpayne@68 1100 "around has no file descriptor"
jpayne@68 1101 )
jpayne@68 1102
jpayne@68 1103 def flush(self) -> None:
jpayne@68 1104 if (
jpayne@68 1105 self._fp is not None
jpayne@68 1106 and hasattr(self._fp, "flush")
jpayne@68 1107 and not getattr(self._fp, "closed", False)
jpayne@68 1108 ):
jpayne@68 1109 return self._fp.flush()
jpayne@68 1110
jpayne@68 1111 def supports_chunked_reads(self) -> bool:
jpayne@68 1112 """
jpayne@68 1113 Checks if the underlying file-like object looks like a
jpayne@68 1114 :class:`http.client.HTTPResponse` object. We do this by testing for
jpayne@68 1115 the fp attribute. If it is present we assume it returns raw chunks as
jpayne@68 1116 processed by read_chunked().
jpayne@68 1117 """
jpayne@68 1118 return hasattr(self._fp, "fp")
jpayne@68 1119
jpayne@68 1120 def _update_chunk_length(self) -> None:
jpayne@68 1121 # First, we'll figure out length of a chunk and then
jpayne@68 1122 # we'll try to read it from socket.
jpayne@68 1123 if self.chunk_left is not None:
jpayne@68 1124 return None
jpayne@68 1125 line = self._fp.fp.readline() # type: ignore[union-attr]
jpayne@68 1126 line = line.split(b";", 1)[0]
jpayne@68 1127 try:
jpayne@68 1128 self.chunk_left = int(line, 16)
jpayne@68 1129 except ValueError:
jpayne@68 1130 self.close()
jpayne@68 1131 if line:
jpayne@68 1132 # Invalid chunked protocol response, abort.
jpayne@68 1133 raise InvalidChunkLength(self, line) from None
jpayne@68 1134 else:
jpayne@68 1135 # Truncated at start of next chunk
jpayne@68 1136 raise ProtocolError("Response ended prematurely") from None
jpayne@68 1137
jpayne@68 1138 def _handle_chunk(self, amt: int | None) -> bytes:
jpayne@68 1139 returned_chunk = None
jpayne@68 1140 if amt is None:
jpayne@68 1141 chunk = self._fp._safe_read(self.chunk_left) # type: ignore[union-attr]
jpayne@68 1142 returned_chunk = chunk
jpayne@68 1143 self._fp._safe_read(2) # type: ignore[union-attr] # Toss the CRLF at the end of the chunk.
jpayne@68 1144 self.chunk_left = None
jpayne@68 1145 elif self.chunk_left is not None and amt < self.chunk_left:
jpayne@68 1146 value = self._fp._safe_read(amt) # type: ignore[union-attr]
jpayne@68 1147 self.chunk_left = self.chunk_left - amt
jpayne@68 1148 returned_chunk = value
jpayne@68 1149 elif amt == self.chunk_left:
jpayne@68 1150 value = self._fp._safe_read(amt) # type: ignore[union-attr]
jpayne@68 1151 self._fp._safe_read(2) # type: ignore[union-attr] # Toss the CRLF at the end of the chunk.
jpayne@68 1152 self.chunk_left = None
jpayne@68 1153 returned_chunk = value
jpayne@68 1154 else: # amt > self.chunk_left
jpayne@68 1155 returned_chunk = self._fp._safe_read(self.chunk_left) # type: ignore[union-attr]
jpayne@68 1156 self._fp._safe_read(2) # type: ignore[union-attr] # Toss the CRLF at the end of the chunk.
jpayne@68 1157 self.chunk_left = None
jpayne@68 1158 return returned_chunk # type: ignore[no-any-return]
jpayne@68 1159
jpayne@68 1160 def read_chunked(
jpayne@68 1161 self, amt: int | None = None, decode_content: bool | None = None
jpayne@68 1162 ) -> typing.Generator[bytes, None, None]:
jpayne@68 1163 """
jpayne@68 1164 Similar to :meth:`HTTPResponse.read`, but with an additional
jpayne@68 1165 parameter: ``decode_content``.
jpayne@68 1166
jpayne@68 1167 :param amt:
jpayne@68 1168 How much of the content to read. If specified, caching is skipped
jpayne@68 1169 because it doesn't make sense to cache partial content as the full
jpayne@68 1170 response.
jpayne@68 1171
jpayne@68 1172 :param decode_content:
jpayne@68 1173 If True, will attempt to decode the body based on the
jpayne@68 1174 'content-encoding' header.
jpayne@68 1175 """
jpayne@68 1176 self._init_decoder()
jpayne@68 1177 # FIXME: Rewrite this method and make it a class with a better structured logic.
jpayne@68 1178 if not self.chunked:
jpayne@68 1179 raise ResponseNotChunked(
jpayne@68 1180 "Response is not chunked. "
jpayne@68 1181 "Header 'transfer-encoding: chunked' is missing."
jpayne@68 1182 )
jpayne@68 1183 if not self.supports_chunked_reads():
jpayne@68 1184 raise BodyNotHttplibCompatible(
jpayne@68 1185 "Body should be http.client.HTTPResponse like. "
jpayne@68 1186 "It should have have an fp attribute which returns raw chunks."
jpayne@68 1187 )
jpayne@68 1188
jpayne@68 1189 with self._error_catcher():
jpayne@68 1190 # Don't bother reading the body of a HEAD request.
jpayne@68 1191 if self._original_response and is_response_to_head(self._original_response):
jpayne@68 1192 self._original_response.close()
jpayne@68 1193 return None
jpayne@68 1194
jpayne@68 1195 # If a response is already read and closed
jpayne@68 1196 # then return immediately.
jpayne@68 1197 if self._fp.fp is None: # type: ignore[union-attr]
jpayne@68 1198 return None
jpayne@68 1199
jpayne@68 1200 if amt and amt < 0:
jpayne@68 1201 # Negative numbers and `None` should be treated the same,
jpayne@68 1202 # but httplib handles only `None` correctly.
jpayne@68 1203 amt = None
jpayne@68 1204
jpayne@68 1205 while True:
jpayne@68 1206 self._update_chunk_length()
jpayne@68 1207 if self.chunk_left == 0:
jpayne@68 1208 break
jpayne@68 1209 chunk = self._handle_chunk(amt)
jpayne@68 1210 decoded = self._decode(
jpayne@68 1211 chunk, decode_content=decode_content, flush_decoder=False
jpayne@68 1212 )
jpayne@68 1213 if decoded:
jpayne@68 1214 yield decoded
jpayne@68 1215
jpayne@68 1216 if decode_content:
jpayne@68 1217 # On CPython and PyPy, we should never need to flush the
jpayne@68 1218 # decoder. However, on Jython we *might* need to, so
jpayne@68 1219 # let's defensively do it anyway.
jpayne@68 1220 decoded = self._flush_decoder()
jpayne@68 1221 if decoded: # Platform-specific: Jython.
jpayne@68 1222 yield decoded
jpayne@68 1223
jpayne@68 1224 # Chunk content ends with \r\n: discard it.
jpayne@68 1225 while self._fp is not None:
jpayne@68 1226 line = self._fp.fp.readline()
jpayne@68 1227 if not line:
jpayne@68 1228 # Some sites may not end with '\r\n'.
jpayne@68 1229 break
jpayne@68 1230 if line == b"\r\n":
jpayne@68 1231 break
jpayne@68 1232
jpayne@68 1233 # We read everything; close the "file".
jpayne@68 1234 if self._original_response:
jpayne@68 1235 self._original_response.close()
jpayne@68 1236
jpayne@68 1237 @property
jpayne@68 1238 def url(self) -> str | None:
jpayne@68 1239 """
jpayne@68 1240 Returns the URL that was the source of this response.
jpayne@68 1241 If the request that generated this response redirected, this method
jpayne@68 1242 will return the final redirect location.
jpayne@68 1243 """
jpayne@68 1244 return self._request_url
jpayne@68 1245
jpayne@68 1246 @url.setter
jpayne@68 1247 def url(self, url: str) -> None:
jpayne@68 1248 self._request_url = url
jpayne@68 1249
jpayne@68 1250 def __iter__(self) -> typing.Iterator[bytes]:
jpayne@68 1251 buffer: list[bytes] = []
jpayne@68 1252 for chunk in self.stream(decode_content=True):
jpayne@68 1253 if b"\n" in chunk:
jpayne@68 1254 chunks = chunk.split(b"\n")
jpayne@68 1255 yield b"".join(buffer) + chunks[0] + b"\n"
jpayne@68 1256 for x in chunks[1:-1]:
jpayne@68 1257 yield x + b"\n"
jpayne@68 1258 if chunks[-1]:
jpayne@68 1259 buffer = [chunks[-1]]
jpayne@68 1260 else:
jpayne@68 1261 buffer = []
jpayne@68 1262 else:
jpayne@68 1263 buffer.append(chunk)
jpayne@68 1264 if buffer:
jpayne@68 1265 yield b"".join(buffer)
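
# Line-by-line iteration provided by __iter__ above (illustrative only; ``resp``
# stands for an HTTPResponse with an unread body, ``process`` for the caller's
# own handling). Each item is a bytes line ending in b"\n", except possibly the
# last one:
#
#     for line in resp:
#         process(line)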