from __future__ import annotations

import collections
import io
import json as _json
import logging
import re
import sys
import typing
import warnings
import zlib
from contextlib import contextmanager
from http.client import HTTPMessage as _HttplibHTTPMessage
from http.client import HTTPResponse as _HttplibHTTPResponse
from socket import timeout as SocketTimeout

if typing.TYPE_CHECKING:
    from ._base_connection import BaseHTTPConnection

try:
    try:
        import brotlicffi as brotli  # type: ignore[import-not-found]
    except ImportError:
        import brotli  # type: ignore[import-not-found]
except ImportError:
    brotli = None

try:
    import zstandard as zstd
except (AttributeError, ImportError, ValueError):  # Defensive:
    HAS_ZSTD = False
else:
    # The package 'zstandard' added the 'eof' property starting
    # in v0.18.0 which we require to ensure a complete and
    # valid zstd stream was fed into the ZstdDecoder.
    # See: https://github.com/urllib3/urllib3/pull/2624
    _zstd_version = tuple(
        map(int, re.search(r"^([0-9]+)\.([0-9]+)", zstd.__version__).groups())  # type: ignore[union-attr]
    )
    if _zstd_version < (0, 18):  # Defensive:
        HAS_ZSTD = False
    else:
        HAS_ZSTD = True

from . import util
from ._base_connection import _TYPE_BODY
from ._collections import HTTPHeaderDict
from .connection import BaseSSLError, HTTPConnection, HTTPException
from .exceptions import (
    BodyNotHttplibCompatible,
    DecodeError,
    HTTPError,
    IncompleteRead,
    InvalidChunkLength,
    InvalidHeader,
    ProtocolError,
    ReadTimeoutError,
    ResponseNotChunked,
    SSLError,
)
from .util.response import is_fp_closed, is_response_to_head
from .util.retry import Retry

if typing.TYPE_CHECKING:
    from .connectionpool import HTTPConnectionPool

log = logging.getLogger(__name__)


class ContentDecoder:
    def decompress(self, data: bytes) -> bytes:
        raise NotImplementedError()

    def flush(self) -> bytes:
        raise NotImplementedError()
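
    # Interface sketch (illustrative only, not used by this module): a concrete
    # decoder implements the same two methods; a pass-through decoder for an
    # unencoded body could look like:
    #
    #     class IdentityDecoder(ContentDecoder):
    #         def decompress(self, data: bytes) -> bytes:
    #             return data
    #
    #         def flush(self) -> bytes:
    #             return b""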


class DeflateDecoder(ContentDecoder):
    def __init__(self) -> None:
        self._first_try = True
        self._data = b""
        self._obj = zlib.decompressobj()

    def decompress(self, data: bytes) -> bytes:
        if not data:
            return data

        if not self._first_try:
            return self._obj.decompress(data)

        self._data += data
        try:
            decompressed = self._obj.decompress(data)
            if decompressed:
                self._first_try = False
                self._data = None  # type: ignore[assignment]
            return decompressed
        except zlib.error:
            self._first_try = False
            self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
            try:
                return self.decompress(self._data)
            finally:
                self._data = None  # type: ignore[assignment]

    def flush(self) -> bytes:
        return self._obj.flush()
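
    # Behaviour sketch (illustrative only): the first decompress() call assumes
    # an RFC 1950 zlib-wrapped stream and, on zlib.error, retries the buffered
    # bytes as raw RFC 1951 deflate, which some servers send despite the
    # "deflate" content-encoding, e.g.
    #
    #     comp = zlib.compressobj(wbits=-zlib.MAX_WBITS)  # raw deflate stream
    #     raw = comp.compress(b"payload") + comp.flush()
    #     DeflateDecoder().decompress(raw)  # falls back, yields b"payload"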


class GzipDecoderState:
    FIRST_MEMBER = 0
    OTHER_MEMBERS = 1
    SWALLOW_DATA = 2


class GzipDecoder(ContentDecoder):
    def __init__(self) -> None:
        self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
        self._state = GzipDecoderState.FIRST_MEMBER

    def decompress(self, data: bytes) -> bytes:
        ret = bytearray()
        if self._state == GzipDecoderState.SWALLOW_DATA or not data:
            return bytes(ret)
        while True:
            try:
                ret += self._obj.decompress(data)
            except zlib.error:
                previous_state = self._state
                # Ignore data after the first error
                self._state = GzipDecoderState.SWALLOW_DATA
                if previous_state == GzipDecoderState.OTHER_MEMBERS:
                    # Allow trailing garbage acceptable in other gzip clients
                    return bytes(ret)
                raise
            data = self._obj.unused_data
            if not data:
                return bytes(ret)
            self._state = GzipDecoderState.OTHER_MEMBERS
            self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)

    def flush(self) -> bytes:
        return self._obj.flush()


if brotli is not None:

    class BrotliDecoder(ContentDecoder):
        # Supports both 'brotlipy' and 'Brotli' packages
        # since they share an import name. The top branches
        # are for 'brotlipy' and bottom branches for 'Brotli'
        def __init__(self) -> None:
            self._obj = brotli.Decompressor()
            if hasattr(self._obj, "decompress"):
                setattr(self, "decompress", self._obj.decompress)
            else:
                setattr(self, "decompress", self._obj.process)

        def flush(self) -> bytes:
            if hasattr(self._obj, "flush"):
                return self._obj.flush()  # type: ignore[no-any-return]
            return b""


if HAS_ZSTD:

    class ZstdDecoder(ContentDecoder):
        def __init__(self) -> None:
            self._obj = zstd.ZstdDecompressor().decompressobj()

        def decompress(self, data: bytes) -> bytes:
            if not data:
                return b""
            data_parts = [self._obj.decompress(data)]
            while self._obj.eof and self._obj.unused_data:
                unused_data = self._obj.unused_data
                self._obj = zstd.ZstdDecompressor().decompressobj()
                data_parts.append(self._obj.decompress(unused_data))
            return b"".join(data_parts)

        def flush(self) -> bytes:
            ret = self._obj.flush()  # note: this is a no-op
            if not self._obj.eof:
                raise DecodeError("Zstandard data is incomplete")
            return ret
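
        # Behaviour sketch (illustrative only): decompress() restarts the
        # decompressobj whenever a frame ends with unused data left over, so a
        # body made of several concatenated zstd frames decodes fully, e.g.
        #
        #     enc = zstd.ZstdCompressor()
        #     body = enc.compress(b"hello ") + enc.compress(b"world")
        #     ZstdDecoder().decompress(body)  # -> b"hello world"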


class MultiDecoder(ContentDecoder):
    """
    From RFC7231:
        If one or more encodings have been applied to a representation, the
        sender that applied the encodings MUST generate a Content-Encoding
        header field that lists the content codings in the order in which
        they were applied.
    """

    def __init__(self, modes: str) -> None:
        self._decoders = [_get_decoder(m.strip()) for m in modes.split(",")]

    def flush(self) -> bytes:
        return self._decoders[0].flush()

    def decompress(self, data: bytes) -> bytes:
        for d in reversed(self._decoders):
            data = d.decompress(data)
        return data


def _get_decoder(mode: str) -> ContentDecoder:
    if "," in mode:
        return MultiDecoder(mode)

    # According to RFC 9110 section 8.4.1.3, recipients should
    # consider x-gzip equivalent to gzip
    if mode in ("gzip", "x-gzip"):
        return GzipDecoder()

    if brotli is not None and mode == "br":
        return BrotliDecoder()

    if HAS_ZSTD and mode == "zstd":
        return ZstdDecoder()

    return DeflateDecoder()
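
# Selection sketch (illustrative only): one Content-Encoding token maps to one
# decoder, while a comma-separated chain maps to a MultiDecoder that unwinds
# the encodings in reverse order of application, e.g. (with Brotli support
# installed):
#
#     _get_decoder("gzip")      # -> GzipDecoder
#     _get_decoder("gzip, br")  # -> MultiDecoder: undo br first, then gzip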


class BytesQueueBuffer:
    """Memory-efficient bytes buffer

    To return decoded data in read() and still follow the BufferedIOBase API, we need a
    buffer to always return the correct amount of bytes.

    This buffer should be filled using calls to put()

    Our maximum memory usage is determined by the sum of the size of:

     * self.buffer, which contains the full data
     * the largest chunk that we will copy in get()

    The worst case scenario is a single chunk, in which case we'll make a full copy of
    the data inside get().
    """

    def __init__(self) -> None:
        self.buffer: typing.Deque[bytes] = collections.deque()
        self._size: int = 0

    def __len__(self) -> int:
        return self._size

    def put(self, data: bytes) -> None:
        self.buffer.append(data)
        self._size += len(data)

    def get(self, n: int) -> bytes:
        if n == 0:
            return b""
        elif not self.buffer:
            raise RuntimeError("buffer is empty")
        elif n < 0:
            raise ValueError("n should be > 0")

        fetched = 0
        ret = io.BytesIO()
        while fetched < n:
            remaining = n - fetched
            chunk = self.buffer.popleft()
            chunk_length = len(chunk)
            if remaining < chunk_length:
                left_chunk, right_chunk = chunk[:remaining], chunk[remaining:]
                ret.write(left_chunk)
                self.buffer.appendleft(right_chunk)
                self._size -= remaining
                break
            else:
                ret.write(chunk)
                self._size -= chunk_length
            fetched += chunk_length

            if not self.buffer:
                break

        return ret.getvalue()

    def get_all(self) -> bytes:
        buffer = self.buffer
        if not buffer:
            assert self._size == 0
            return b""
        if len(buffer) == 1:
            result = buffer.pop()
        else:
            ret = io.BytesIO()
            ret.writelines(buffer.popleft() for _ in range(len(buffer)))
            result = ret.getvalue()
        self._size = 0
        return result
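
    # Usage sketch (illustrative only): get() returns exactly the requested
    # number of bytes across chunk boundaries and keeps the remainder queued:
    #
    #     buf = BytesQueueBuffer()
    #     buf.put(b"foo")
    #     buf.put(b"barbaz")
    #     buf.get(4)  # -> b"foob"; b"arbaz" stays buffered, so len(buf) == 5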


class BaseHTTPResponse(io.IOBase):
    CONTENT_DECODERS = ["gzip", "x-gzip", "deflate"]
    if brotli is not None:
        CONTENT_DECODERS += ["br"]
    if HAS_ZSTD:
        CONTENT_DECODERS += ["zstd"]
    REDIRECT_STATUSES = [301, 302, 303, 307, 308]

    DECODER_ERROR_CLASSES: tuple[type[Exception], ...] = (IOError, zlib.error)
    if brotli is not None:
        DECODER_ERROR_CLASSES += (brotli.error,)

    if HAS_ZSTD:
        DECODER_ERROR_CLASSES += (zstd.ZstdError,)

    def __init__(
        self,
        *,
        headers: typing.Mapping[str, str] | typing.Mapping[bytes, bytes] | None = None,
        status: int,
        version: int,
        version_string: str,
        reason: str | None,
        decode_content: bool,
        request_url: str | None,
        retries: Retry | None = None,
    ) -> None:
        if isinstance(headers, HTTPHeaderDict):
            self.headers = headers
        else:
            self.headers = HTTPHeaderDict(headers)  # type: ignore[arg-type]
        self.status = status
        self.version = version
        self.version_string = version_string
        self.reason = reason
        self.decode_content = decode_content
        self._has_decoded_content = False
        self._request_url: str | None = request_url
        self.retries = retries

        self.chunked = False
        tr_enc = self.headers.get("transfer-encoding", "").lower()
        # Don't incur the penalty of creating a list and then discarding it
        encodings = (enc.strip() for enc in tr_enc.split(","))
        if "chunked" in encodings:
            self.chunked = True

        self._decoder: ContentDecoder | None = None
        self.length_remaining: int | None

    def get_redirect_location(self) -> str | None | typing.Literal[False]:
        """
        Should we redirect and where to?

        :returns: Truthy redirect location string if we got a redirect status
            code and valid location. ``None`` if redirect status and no
            location. ``False`` if not a redirect status code.
        """
        if self.status in self.REDIRECT_STATUSES:
            return self.headers.get("location")
        return False

    @property
    def data(self) -> bytes:
        raise NotImplementedError()

    def json(self) -> typing.Any:
        """
        Deserializes the body of the HTTP response as a Python object.

        The body of the HTTP response must be encoded using UTF-8, as per
        `RFC 8259 Section 8.1 <https://www.rfc-editor.org/rfc/rfc8259#section-8.1>`_.

        To use a custom JSON decoder pass the result of :attr:`HTTPResponse.data` to
        your custom decoder instead.

        If the body of the HTTP response is not decodable to UTF-8, a
        `UnicodeDecodeError` will be raised. If the body of the HTTP response is not a
        valid JSON document, a `json.JSONDecodeError` will be raised.

        Read more :ref:`here <json_content>`.

        :returns: The body of the HTTP response as a Python object.
        """
        data = self.data.decode("utf-8")
        return _json.loads(data)
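
    # Usage sketch (illustrative only, ``resp`` is a response object): to use a
    # custom JSON decoder, work from the raw body instead of this helper, e.g.
    #
    #     import json
    #     obj = json.loads(resp.data.decode("utf-8"), parse_float=str)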

    @property
    def url(self) -> str | None:
        raise NotImplementedError()

    @url.setter
    def url(self, url: str | None) -> None:
        raise NotImplementedError()

    @property
    def connection(self) -> BaseHTTPConnection | None:
        raise NotImplementedError()

    @property
    def retries(self) -> Retry | None:
        return self._retries

    @retries.setter
    def retries(self, retries: Retry | None) -> None:
        # Override the request_url if retries has a redirect location.
        if retries is not None and retries.history:
            self.url = retries.history[-1].redirect_location
        self._retries = retries

    def stream(
        self, amt: int | None = 2**16, decode_content: bool | None = None
    ) -> typing.Iterator[bytes]:
        raise NotImplementedError()

    def read(
        self,
        amt: int | None = None,
        decode_content: bool | None = None,
        cache_content: bool = False,
    ) -> bytes:
        raise NotImplementedError()

    def read1(
        self,
        amt: int | None = None,
        decode_content: bool | None = None,
    ) -> bytes:
        raise NotImplementedError()

    def read_chunked(
        self,
        amt: int | None = None,
        decode_content: bool | None = None,
    ) -> typing.Iterator[bytes]:
        raise NotImplementedError()

    def release_conn(self) -> None:
        raise NotImplementedError()

    def drain_conn(self) -> None:
        raise NotImplementedError()

    def close(self) -> None:
        raise NotImplementedError()

    def _init_decoder(self) -> None:
        """
        Set up the _decoder attribute if necessary.
        """
        # Note: content-encoding value should be case-insensitive, per RFC 7230
        # Section 3.2
        content_encoding = self.headers.get("content-encoding", "").lower()
        if self._decoder is None:
            if content_encoding in self.CONTENT_DECODERS:
                self._decoder = _get_decoder(content_encoding)
            elif "," in content_encoding:
                encodings = [
                    e.strip()
                    for e in content_encoding.split(",")
                    if e.strip() in self.CONTENT_DECODERS
                ]
                if encodings:
                    self._decoder = _get_decoder(content_encoding)

    def _decode(
        self, data: bytes, decode_content: bool | None, flush_decoder: bool
    ) -> bytes:
        """
        Decode the data passed in and potentially flush the decoder.
        """
        if not decode_content:
            if self._has_decoded_content:
                raise RuntimeError(
                    "Calling read(decode_content=False) is not supported after "
                    "read(decode_content=True) was called."
                )
            return data

        try:
            if self._decoder:
                data = self._decoder.decompress(data)
                self._has_decoded_content = True
        except self.DECODER_ERROR_CLASSES as e:
            content_encoding = self.headers.get("content-encoding", "").lower()
            raise DecodeError(
                "Received response with content-encoding: %s, but "
                "failed to decode it." % content_encoding,
                e,
            ) from e
        if flush_decoder:
            data += self._flush_decoder()

        return data

    def _flush_decoder(self) -> bytes:
        """
        Flushes the decoder. Should only be called if the decoder is actually
        being used.
        """
        if self._decoder:
            return self._decoder.decompress(b"") + self._decoder.flush()
        return b""

    # Compatibility methods for `io` module
    def readinto(self, b: bytearray) -> int:
        temp = self.read(len(b))
        if len(temp) == 0:
            return 0
        else:
            b[: len(temp)] = temp
            return len(temp)

    # Compatibility methods for http.client.HTTPResponse
    def getheaders(self) -> HTTPHeaderDict:
        warnings.warn(
            "HTTPResponse.getheaders() is deprecated and will be removed "
            "in urllib3 v2.1.0. Instead access HTTPResponse.headers directly.",
            category=DeprecationWarning,
            stacklevel=2,
        )
        return self.headers

    def getheader(self, name: str, default: str | None = None) -> str | None:
        warnings.warn(
            "HTTPResponse.getheader() is deprecated and will be removed "
            "in urllib3 v2.1.0. Instead use HTTPResponse.headers.get(name, default).",
            category=DeprecationWarning,
            stacklevel=2,
        )
        return self.headers.get(name, default)

    # Compatibility method for http.cookiejar
    def info(self) -> HTTPHeaderDict:
        return self.headers

    def geturl(self) -> str | None:
        return self.url


class HTTPResponse(BaseHTTPResponse):
    """
    HTTP Response container.

    Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is
    loaded and decoded on-demand when the ``data`` property is accessed. This
    class is also compatible with the Python standard library's :mod:`io`
    module, and can hence be treated as a readable object in the context of that
    framework.

    Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`:

    :param preload_content:
        If True, the response's body will be preloaded during construction.

    :param decode_content:
        If True, will attempt to decode the body based on the
        'content-encoding' header.

    :param original_response:
        When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse`
        object, it's convenient to include the original for debug purposes. It's
        otherwise unused.

    :param retries:
        Contains the last :class:`~urllib3.util.retry.Retry` that
        was used during the request.

    :param enforce_content_length:
        Enforce content length checking. Body returned by server must match
        value of Content-Length header, if present. Otherwise, raise error.
    """

    def __init__(
        self,
        body: _TYPE_BODY = "",
        headers: typing.Mapping[str, str] | typing.Mapping[bytes, bytes] | None = None,
        status: int = 0,
        version: int = 0,
        version_string: str = "HTTP/?",
        reason: str | None = None,
        preload_content: bool = True,
        decode_content: bool = True,
        original_response: _HttplibHTTPResponse | None = None,
        pool: HTTPConnectionPool | None = None,
        connection: HTTPConnection | None = None,
        msg: _HttplibHTTPMessage | None = None,
        retries: Retry | None = None,
        enforce_content_length: bool = True,
        request_method: str | None = None,
        request_url: str | None = None,
        auto_close: bool = True,
    ) -> None:
        super().__init__(
            headers=headers,
            status=status,
            version=version,
            version_string=version_string,
            reason=reason,
            decode_content=decode_content,
            request_url=request_url,
            retries=retries,
        )

        self.enforce_content_length = enforce_content_length
        self.auto_close = auto_close

        self._body = None
        self._fp: _HttplibHTTPResponse | None = None
        self._original_response = original_response
        self._fp_bytes_read = 0
        self.msg = msg

        if body and isinstance(body, (str, bytes)):
            self._body = body

        self._pool = pool
        self._connection = connection

        if hasattr(body, "read"):
            self._fp = body  # type: ignore[assignment]

        # Are we using the chunked-style of transfer encoding?
        self.chunk_left: int | None = None

        # Determine length of response
        self.length_remaining = self._init_length(request_method)

        # Used to return the correct amount of bytes for partial read()s
        self._decoded_buffer = BytesQueueBuffer()

        # If requested, preload the body.
        if preload_content and not self._body:
            self._body = self.read(decode_content=decode_content)

    def release_conn(self) -> None:
        if not self._pool or not self._connection:
            return None

        self._pool._put_conn(self._connection)
        self._connection = None

    def drain_conn(self) -> None:
        """
        Read and discard any remaining HTTP response data in the response connection.

        Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
        """
        try:
            self.read()
        except (HTTPError, OSError, BaseSSLError, HTTPException):
            pass

    @property
    def data(self) -> bytes:
        # For backwards-compat with urllib3 0.4 and earlier.
        if self._body:
            return self._body  # type: ignore[return-value]

        if self._fp:
            return self.read(cache_content=True)

        return None  # type: ignore[return-value]

    @property
    def connection(self) -> HTTPConnection | None:
        return self._connection

    def isclosed(self) -> bool:
        return is_fp_closed(self._fp)

    def tell(self) -> int:
        """
        Obtain the number of bytes pulled over the wire so far. May differ from
        the amount of content returned by :meth:`urllib3.response.HTTPResponse.read`
        if bytes are encoded on the wire (e.g., compressed).
        """
        return self._fp_bytes_read

    def _init_length(self, request_method: str | None) -> int | None:
        """
        Set initial length value for Response content if available.
        """
        length: int | None
        content_length: str | None = self.headers.get("content-length")

        if content_length is not None:
            if self.chunked:
                # This Response will fail with an IncompleteRead if it can't be
                # received as chunked. This method falls back to attempt reading
                # the response before raising an exception.
                log.warning(
                    "Received response with both Content-Length and "
                    "Transfer-Encoding set. This is expressly forbidden "
                    "by RFC 7230 sec 3.3.2. Ignoring Content-Length and "
                    "attempting to process response as Transfer-Encoding: "
                    "chunked."
                )
                return None

            try:
                # RFC 7230 section 3.3.2 specifies multiple content lengths can
                # be sent in a single Content-Length header
                # (e.g. Content-Length: 42, 42). This line ensures the values
                # are all valid ints and that as long as the `set` length is 1,
                # all values are the same. Otherwise, the header is invalid.
                lengths = {int(val) for val in content_length.split(",")}
                if len(lengths) > 1:
                    raise InvalidHeader(
                        "Content-Length contained multiple "
                        "unmatching values (%s)" % content_length
                    )
                length = lengths.pop()
            except ValueError:
                length = None
            else:
                if length < 0:
                    length = None

        else:  # if content_length is None
            length = None

        # Convert status to int for comparison
        # In some cases, httplib returns a status of "_UNKNOWN"
        try:
            status = int(self.status)
        except ValueError:
            status = 0

        # Check for responses that shouldn't include a body
        if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD":
            length = 0

        return length
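
    # Parsing sketch (illustrative only): duplicate Content-Length values
    # collapse to a single length, while mismatched values are rejected, e.g.
    #
    #     "Content-Length: 42, 42"  ->  {42}      ->  length 42
    #     "Content-Length: 42, 64"  ->  {42, 64}  ->  InvalidHeader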

    @contextmanager
    def _error_catcher(self) -> typing.Generator[None, None, None]:
        """
        Catch low-level python exceptions, instead re-raising urllib3
        variants, so that low-level exceptions are not leaked in the
        high-level api.

        On exit, release the connection back to the pool.
        """
        clean_exit = False

        try:
            try:
                yield

            except SocketTimeout as e:
                # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
                # there is yet no clean way to get at it from this context.
                raise ReadTimeoutError(self._pool, None, "Read timed out.") from e  # type: ignore[arg-type]

            except BaseSSLError as e:
                # FIXME: Is there a better way to differentiate between SSLErrors?
                if "read operation timed out" not in str(e):
                    # SSL errors related to framing/MAC get wrapped and reraised here
                    raise SSLError(e) from e

                raise ReadTimeoutError(self._pool, None, "Read timed out.") from e  # type: ignore[arg-type]

            except IncompleteRead as e:
                if (
                    e.expected is not None
                    and e.partial is not None
                    and e.expected == -e.partial
                ):
                    arg = "Response may not contain content."
                else:
                    arg = f"Connection broken: {e!r}"
                raise ProtocolError(arg, e) from e

            except (HTTPException, OSError) as e:
                raise ProtocolError(f"Connection broken: {e!r}", e) from e

            # If no exception is thrown, we should avoid cleaning up
            # unnecessarily.
            clean_exit = True
        finally:
            # If we didn't terminate cleanly, we need to throw away our
            # connection.
            if not clean_exit:
                # The response may not be closed but we're not going to use it
                # anymore so close it now to ensure that the connection is
                # released back to the pool.
                if self._original_response:
                    self._original_response.close()

                # Closing the response may not actually be sufficient to close
                # everything, so if we have a hold of the connection close that
                # too.
                if self._connection:
                    self._connection.close()

            # If we hold the original response but it's closed now, we should
            # return the connection back to the pool.
            if self._original_response and self._original_response.isclosed():
                self.release_conn()

    def _fp_read(
        self,
        amt: int | None = None,
        *,
        read1: bool = False,
    ) -> bytes:
        """
        Read a response with the thought that reading the number of bytes
        larger than can fit in a 32-bit int at a time via SSL in some
        known cases leads to an overflow error that has to be prevented
        if `amt` or `self.length_remaining` indicate that a problem may
        happen.

        The known cases:
          * 3.8 <= CPython < 3.9.7 because of a bug
            https://github.com/urllib3/urllib3/issues/2513#issuecomment-1152559900.
          * urllib3 injected with pyOpenSSL-backed SSL-support.
          * CPython < 3.10 only when `amt` does not fit 32-bit int.
        """
        assert self._fp
        c_int_max = 2**31 - 1
        if (
            (amt and amt > c_int_max)
            or (
                amt is None
                and self.length_remaining
                and self.length_remaining > c_int_max
            )
        ) and (util.IS_PYOPENSSL or sys.version_info < (3, 10)):
            if read1:
                return self._fp.read1(c_int_max)
            buffer = io.BytesIO()
            # Besides `max_chunk_amt` being a maximum chunk size, it
            # affects memory overhead of reading a response by this
            # method in CPython.
            # `c_int_max` equal to 2 GiB - 1 byte is the actual maximum
            # chunk size that does not lead to an overflow error, but
            # 256 MiB is a compromise.
            max_chunk_amt = 2**28
            while amt is None or amt != 0:
                if amt is not None:
                    chunk_amt = min(amt, max_chunk_amt)
                    amt -= chunk_amt
                else:
                    chunk_amt = max_chunk_amt
                data = self._fp.read(chunk_amt)
                if not data:
                    break
                buffer.write(data)
                del data  # to reduce peak memory usage by `max_chunk_amt`.
            return buffer.getvalue()
        elif read1:
            return self._fp.read1(amt) if amt is not None else self._fp.read1()
        else:
            # StringIO doesn't like amt=None
            return self._fp.read(amt) if amt is not None else self._fp.read()

    def _raw_read(
        self,
        amt: int | None = None,
        *,
        read1: bool = False,
    ) -> bytes:
        """
        Reads `amt` of bytes from the socket.
        """
        if self._fp is None:
            return None  # type: ignore[return-value]

        fp_closed = getattr(self._fp, "closed", False)

        with self._error_catcher():
            data = self._fp_read(amt, read1=read1) if not fp_closed else b""
            if amt is not None and amt != 0 and not data:
                # Platform-specific: Buggy versions of Python.
                # Close the connection when no data is returned
                #
                # This is redundant to what httplib/http.client _should_
                # already do. However, versions of python released before
                # December 15, 2012 (http://bugs.python.org/issue16298) do
                # not properly close the connection in all cases. There is
                # no harm in redundantly calling close.
                self._fp.close()
                if (
                    self.enforce_content_length
                    and self.length_remaining is not None
                    and self.length_remaining != 0
                ):
                    # This is an edge case that httplib failed to cover due
                    # to concerns of backward compatibility. We're
                    # addressing it here to make sure IncompleteRead is
                    # raised during streaming, so all calls with incorrect
                    # Content-Length are caught.
                    raise IncompleteRead(self._fp_bytes_read, self.length_remaining)
            elif read1 and (
                (amt != 0 and not data) or self.length_remaining == len(data)
            ):
                # All data has been read, but `self._fp.read1` in
                # CPython 3.12 and older doesn't always close
                # `http.client.HTTPResponse`, so we close it here.
                # See https://github.com/python/cpython/issues/113199
                self._fp.close()

            if data:
                self._fp_bytes_read += len(data)
                if self.length_remaining is not None:
                    self.length_remaining -= len(data)
        return data

    def read(
        self,
        amt: int | None = None,
        decode_content: bool | None = None,
        cache_content: bool = False,
    ) -> bytes:
        """
        Similar to :meth:`http.client.HTTPResponse.read`, but with two additional
        parameters: ``decode_content`` and ``cache_content``.

        :param amt:
            How much of the content to read. If specified, caching is skipped
            because it doesn't make sense to cache partial content as the full
            response.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.

        :param cache_content:
            If True, will save the returned data such that the same result is
            returned regardless of the state of the underlying file object. This
            is useful if you want the ``.data`` property to continue working
            after having ``.read()`` the file object. (Overridden if ``amt`` is
            set.)
        """
        self._init_decoder()
        if decode_content is None:
            decode_content = self.decode_content

        if amt and amt < 0:
            # Negative numbers and `None` should be treated the same.
            amt = None
        elif amt is not None:
            cache_content = False

            if len(self._decoded_buffer) >= amt:
                return self._decoded_buffer.get(amt)

        data = self._raw_read(amt)

        flush_decoder = amt is None or (amt != 0 and not data)

        if not data and len(self._decoded_buffer) == 0:
            return data

        if amt is None:
            data = self._decode(data, decode_content, flush_decoder)
            if cache_content:
                self._body = data
        else:
            # do not waste memory on buffer when not decoding
            if not decode_content:
                if self._has_decoded_content:
                    raise RuntimeError(
                        "Calling read(decode_content=False) is not supported after "
                        "read(decode_content=True) was called."
                    )
                return data

            decoded_data = self._decode(data, decode_content, flush_decoder)
            self._decoded_buffer.put(decoded_data)

            while len(self._decoded_buffer) < amt and data:
                # TODO make sure to initially read enough data to get past the headers
                # For example, the GZ file header takes 10 bytes, we don't want to read
                # it one byte at a time
                data = self._raw_read(amt)
                decoded_data = self._decode(data, decode_content, flush_decoder)
                self._decoded_buffer.put(decoded_data)
            data = self._decoded_buffer.get(amt)

        return data
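
    # Usage sketch (illustrative only, assumes the public urllib3 API): with
    # ``preload_content=False`` the body can be pulled in pieces, e.g.
    #
    #     resp = urllib3.request("GET", "https://example.com", preload_content=False)
    #     head = resp.read(1024)  # up to 1024 decoded bytes
    #     rest = resp.read()      # remainder of the body
    #     resp.release_conn()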

    def read1(
        self,
        amt: int | None = None,
        decode_content: bool | None = None,
    ) -> bytes:
        """
        Similar to ``http.client.HTTPResponse.read1`` and documented
        in :meth:`io.BufferedReader.read1`, but with an additional parameter:
        ``decode_content``.

        :param amt:
            How much of the content to read.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        if decode_content is None:
            decode_content = self.decode_content
        if amt and amt < 0:
            # Negative numbers and `None` should be treated the same.
            amt = None
        # try and respond without going to the network
        if self._has_decoded_content:
            if not decode_content:
                raise RuntimeError(
                    "Calling read1(decode_content=False) is not supported after "
                    "read1(decode_content=True) was called."
                )
            if len(self._decoded_buffer) > 0:
                if amt is None:
                    return self._decoded_buffer.get_all()
                return self._decoded_buffer.get(amt)
            if amt == 0:
                return b""

        # FIXME, this method's type doesn't say returning None is possible
        data = self._raw_read(amt, read1=True)
        if not decode_content or data is None:
            return data

        self._init_decoder()
        while True:
            flush_decoder = not data
            decoded_data = self._decode(data, decode_content, flush_decoder)
            self._decoded_buffer.put(decoded_data)
            if decoded_data or flush_decoder:
                break
            data = self._raw_read(8192, read1=True)

        if amt is None:
            return self._decoded_buffer.get_all()
        return self._decoded_buffer.get(amt)

    def stream(
        self, amt: int | None = 2**16, decode_content: bool | None = None
    ) -> typing.Generator[bytes, None, None]:
        """
        A generator wrapper for the read() method. A call will block until
        ``amt`` bytes have been read from the connection or until the
        connection is closed.

        :param amt:
            How much of the content to read. The generator will return up to
            this much data per iteration, but may return less. This is
            particularly likely when using compressed data. However, the empty
            string will never be returned.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        if self.chunked and self.supports_chunked_reads():
            yield from self.read_chunked(amt, decode_content=decode_content)
        else:
            while not is_fp_closed(self._fp) or len(self._decoded_buffer) > 0:
                data = self.read(amt=amt, decode_content=decode_content)

                if data:
                    yield data
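
    # Usage sketch (illustrative only, assumes the public urllib3 API):
    #
    #     resp = urllib3.request("GET", "https://example.com", preload_content=False)
    #     for chunk in resp.stream(8192):
    #         handle(chunk)  # ``handle`` is a placeholder for user code
    #     resp.release_conn()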

    # Overrides from io.IOBase
    def readable(self) -> bool:
        return True

    def close(self) -> None:
        if not self.closed and self._fp:
            self._fp.close()

        if self._connection:
            self._connection.close()

        if not self.auto_close:
            io.IOBase.close(self)

    @property
    def closed(self) -> bool:
        if not self.auto_close:
            return io.IOBase.closed.__get__(self)  # type: ignore[no-any-return]
        elif self._fp is None:
            return True
        elif hasattr(self._fp, "isclosed"):
            return self._fp.isclosed()
        elif hasattr(self._fp, "closed"):
            return self._fp.closed
        else:
            return True

    def fileno(self) -> int:
        if self._fp is None:
            raise OSError("HTTPResponse has no file to get a fileno from")
        elif hasattr(self._fp, "fileno"):
            return self._fp.fileno()
        else:
            raise OSError(
                "The file-like object this HTTPResponse is wrapped "
                "around has no file descriptor"
            )

    def flush(self) -> None:
        if (
            self._fp is not None
            and hasattr(self._fp, "flush")
            and not getattr(self._fp, "closed", False)
        ):
            return self._fp.flush()

    def supports_chunked_reads(self) -> bool:
        """
        Checks if the underlying file-like object looks like a
        :class:`http.client.HTTPResponse` object. We do this by testing for
        the fp attribute. If it is present we assume it returns raw chunks as
        processed by read_chunked().
        """
        return hasattr(self._fp, "fp")

    def _update_chunk_length(self) -> None:
        # First, we'll figure out length of a chunk and then
        # we'll try to read it from socket.
        if self.chunk_left is not None:
            return None
        line = self._fp.fp.readline()  # type: ignore[union-attr]
        line = line.split(b";", 1)[0]
        try:
            self.chunk_left = int(line, 16)
        except ValueError:
            self.close()
            if line:
                # Invalid chunked protocol response, abort.
                raise InvalidChunkLength(self, line) from None
            else:
                # Truncated at start of next chunk
                raise ProtocolError("Response ended prematurely") from None

    def _handle_chunk(self, amt: int | None) -> bytes:
        returned_chunk = None
        if amt is None:
            chunk = self._fp._safe_read(self.chunk_left)  # type: ignore[union-attr]
            returned_chunk = chunk
            self._fp._safe_read(2)  # type: ignore[union-attr] # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
        elif self.chunk_left is not None and amt < self.chunk_left:
            value = self._fp._safe_read(amt)  # type: ignore[union-attr]
            self.chunk_left = self.chunk_left - amt
            returned_chunk = value
        elif amt == self.chunk_left:
            value = self._fp._safe_read(amt)  # type: ignore[union-attr]
            self._fp._safe_read(2)  # type: ignore[union-attr] # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
            returned_chunk = value
        else:  # amt > self.chunk_left
            returned_chunk = self._fp._safe_read(self.chunk_left)  # type: ignore[union-attr]
            self._fp._safe_read(2)  # type: ignore[union-attr] # Toss the CRLF at the end of the chunk.
            self.chunk_left = None
        return returned_chunk  # type: ignore[no-any-return]

    def read_chunked(
        self, amt: int | None = None, decode_content: bool | None = None
    ) -> typing.Generator[bytes, None, None]:
        """
        Similar to :meth:`HTTPResponse.read`, but with an additional
        parameter: ``decode_content``.

        :param amt:
            How much of the content to read. If specified, caching is skipped
            because it doesn't make sense to cache partial content as the full
            response.

        :param decode_content:
            If True, will attempt to decode the body based on the
            'content-encoding' header.
        """
        self._init_decoder()
        # FIXME: Rewrite this method and make it a class with a better structured logic.
        if not self.chunked:
            raise ResponseNotChunked(
                "Response is not chunked. "
                "Header 'transfer-encoding: chunked' is missing."
            )
        if not self.supports_chunked_reads():
            raise BodyNotHttplibCompatible(
                "Body should be http.client.HTTPResponse like. "
                "It should have an fp attribute which returns raw chunks."
            )

        with self._error_catcher():
            # Don't bother reading the body of a HEAD request.
            if self._original_response and is_response_to_head(self._original_response):
                self._original_response.close()
                return None

            # If a response is already read and closed
            # then return immediately.
            if self._fp.fp is None:  # type: ignore[union-attr]
                return None

            if amt and amt < 0:
                # Negative numbers and `None` should be treated the same,
                # but httplib handles only `None` correctly.
                amt = None

            while True:
                self._update_chunk_length()
                if self.chunk_left == 0:
                    break
                chunk = self._handle_chunk(amt)
                decoded = self._decode(
                    chunk, decode_content=decode_content, flush_decoder=False
                )
                if decoded:
                    yield decoded

            if decode_content:
                # On CPython and PyPy, we should never need to flush the
                # decoder. However, on Jython we *might* need to, so
                # let's defensively do it anyway.
                decoded = self._flush_decoder()
                if decoded:  # Platform-specific: Jython.
                    yield decoded

            # Chunk content ends with \r\n: discard it.
            while self._fp is not None:
                line = self._fp.fp.readline()
                if not line:
                    # Some sites may not end with '\r\n'.
                    break
                if line == b"\r\n":
                    break

            # We read everything; close the "file".
            if self._original_response:
                self._original_response.close()

    @property
    def url(self) -> str | None:
        """
        Returns the URL that was the source of this response.
        If the request that generated this response redirected, this method
        will return the final redirect location.
        """
        return self._request_url

    @url.setter
    def url(self, url: str) -> None:
        self._request_url = url

    def __iter__(self) -> typing.Iterator[bytes]:
        buffer: list[bytes] = []
        for chunk in self.stream(decode_content=True):
            if b"\n" in chunk:
                chunks = chunk.split(b"\n")
                yield b"".join(buffer) + chunks[0] + b"\n"
                for x in chunks[1:-1]:
                    yield x + b"\n"
                if chunks[-1]:
                    buffer = [chunks[-1]]
                else:
                    buffer = []
            else:
                buffer.append(chunk)
        if buffer:
            yield b"".join(buffer)
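
    # Usage sketch (illustrative only): iterating a non-preloaded response
    # yields decoded lines split on b"\n", buffering partial lines between
    # chunks, e.g.
    #
    #     for line in resp:  # ``resp`` is an HTTPResponse
    #         handle(line)   # ``handle`` is a placeholder for user code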
|