jpayne@7
|
1 from __future__ import annotations
|
jpayne@7
|
2
|
jpayne@7
|
3 import collections
|
jpayne@7
|
4 import io
|
jpayne@7
|
5 import json as _json
|
jpayne@7
|
6 import logging
|
jpayne@7
|
7 import re
|
jpayne@7
|
8 import sys
|
jpayne@7
|
9 import typing
|
jpayne@7
|
10 import warnings
|
jpayne@7
|
11 import zlib
|
jpayne@7
|
12 from contextlib import contextmanager
|
jpayne@7
|
13 from http.client import HTTPMessage as _HttplibHTTPMessage
|
jpayne@7
|
14 from http.client import HTTPResponse as _HttplibHTTPResponse
|
jpayne@7
|
15 from socket import timeout as SocketTimeout
|
jpayne@7
|
16
|
jpayne@7
|
17 if typing.TYPE_CHECKING:
|
jpayne@7
|
18 from ._base_connection import BaseHTTPConnection
|
jpayne@7
|
19
|
jpayne@7
|
20 try:
|
jpayne@7
|
21 try:
|
jpayne@7
|
22 import brotlicffi as brotli # type: ignore[import-not-found]
|
jpayne@7
|
23 except ImportError:
|
jpayne@7
|
24 import brotli # type: ignore[import-not-found]
|
jpayne@7
|
25 except ImportError:
|
jpayne@7
|
26 brotli = None
|
jpayne@7
|
27
|
jpayne@7
|
try:
    import zstandard as zstd  # type: ignore[import-not-found]

    # The package 'zstandard' added the 'eof' property starting
    # in v0.18.0 which we require to ensure a complete and
    # valid zstd stream was fed into the ZstdDecoder.
    # See: https://github.com/urllib3/urllib3/pull/2624
    # (Fixed: the assignment was previously duplicated as
    # `_zstd_version = _zstd_version = ...`.)
    _zstd_version = tuple(
        map(int, re.search(r"^([0-9]+)\.([0-9]+)", zstd.__version__).groups())  # type: ignore[union-attr]
    )
    if _zstd_version < (0, 18):  # Defensive:
        zstd = None

except (AttributeError, ImportError, ValueError):  # Defensive:
    zstd = None
|
jpayne@7
|
43
|
jpayne@7
|
44 from . import util
|
jpayne@7
|
45 from ._base_connection import _TYPE_BODY
|
jpayne@7
|
46 from ._collections import HTTPHeaderDict
|
jpayne@7
|
47 from .connection import BaseSSLError, HTTPConnection, HTTPException
|
jpayne@7
|
48 from .exceptions import (
|
jpayne@7
|
49 BodyNotHttplibCompatible,
|
jpayne@7
|
50 DecodeError,
|
jpayne@7
|
51 HTTPError,
|
jpayne@7
|
52 IncompleteRead,
|
jpayne@7
|
53 InvalidChunkLength,
|
jpayne@7
|
54 InvalidHeader,
|
jpayne@7
|
55 ProtocolError,
|
jpayne@7
|
56 ReadTimeoutError,
|
jpayne@7
|
57 ResponseNotChunked,
|
jpayne@7
|
58 SSLError,
|
jpayne@7
|
59 )
|
jpayne@7
|
60 from .util.response import is_fp_closed, is_response_to_head
|
jpayne@7
|
61 from .util.retry import Retry
|
jpayne@7
|
62
|
jpayne@7
|
63 if typing.TYPE_CHECKING:
|
jpayne@7
|
64 from typing import Literal
|
jpayne@7
|
65
|
jpayne@7
|
66 from .connectionpool import HTTPConnectionPool
|
jpayne@7
|
67
|
jpayne@7
|
68 log = logging.getLogger(__name__)
|
jpayne@7
|
69
|
jpayne@7
|
70
|
jpayne@7
|
class ContentDecoder:
    """Interface for streaming content decoders.

    A concrete subclass handles one ``Content-Encoding`` scheme by
    implementing both :meth:`decompress` and :meth:`flush`.
    """

    def decompress(self, data: bytes) -> bytes:
        # Decode a chunk of encoded body data; return any bytes ready so far.
        raise NotImplementedError()

    def flush(self) -> bytes:
        # Finalize the stream and return any remaining decoded bytes.
        raise NotImplementedError()
|
jpayne@7
|
77
|
jpayne@7
|
78
|
jpayne@7
|
class DeflateDecoder(ContentDecoder):
    """Decoder for ``Content-Encoding: deflate``.

    Some servers send raw DEFLATE data instead of the RFC-mandated
    zlib-wrapped stream.  While decoding the first chunk(s) we try the
    zlib format and, if that fails, transparently replay the same bytes
    through a raw-DEFLATE decompressor.
    """

    def __init__(self) -> None:
        self._first_try = True  # still deciding between zlib and raw deflate
        self._data = b""  # bytes seen so far, kept for the raw-deflate retry
        self._obj = zlib.decompressobj()

    def decompress(self, data: bytes) -> bytes:
        if not data:
            return data

        if not self._first_try:
            return self._obj.decompress(data)

        # Remember everything received so far so it can be replayed if the
        # zlib-wrapped guess turns out to be wrong.
        self._data += data
        try:
            decompressed = self._obj.decompress(data)
        except zlib.error:
            # Not zlib-wrapped: switch to a raw DEFLATE stream and replay
            # every byte received so far through the new decompressor.
            self._first_try = False
            self._obj = zlib.decompressobj(-zlib.MAX_WBITS)
            try:
                return self.decompress(self._data)
            finally:
                self._data = None  # type: ignore[assignment]
        if decompressed:
            # Output was produced, so the zlib guess was right; the replay
            # buffer is no longer needed.
            self._first_try = False
            self._data = None  # type: ignore[assignment]
        return decompressed

    def flush(self) -> bytes:
        return self._obj.flush()
|
jpayne@7
|
109
|
jpayne@7
|
110
|
jpayne@7
|
class GzipDecoderState:
    # Tri-state progress tracker for GzipDecoder.
    FIRST_MEMBER = 0  # still decoding the first gzip member
    OTHER_MEMBERS = 1  # at least one member decoded completely
    SWALLOW_DATA = 2  # an error occurred; all further input is ignored
|
jpayne@7
|
115
|
jpayne@7
|
116
|
jpayne@7
|
class GzipDecoder(ContentDecoder):
    """Decoder for ``Content-Encoding: gzip`` (and ``x-gzip``).

    Supports multi-member gzip streams and, once at least one member has
    been fully decoded, tolerates trailing garbage the way other gzip
    clients do.
    """

    def __init__(self) -> None:
        self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
        self._state = GzipDecoderState.FIRST_MEMBER

    def decompress(self, data: bytes) -> bytes:
        out = bytearray()
        if not data or self._state == GzipDecoderState.SWALLOW_DATA:
            return bytes(out)
        while True:
            try:
                out += self._obj.decompress(data)
            except zlib.error:
                prior_state = self._state
                # Ignore data after the first error
                self._state = GzipDecoderState.SWALLOW_DATA
                if prior_state == GzipDecoderState.OTHER_MEMBERS:
                    # Allow trailing garbage acceptable in other gzip clients
                    return bytes(out)
                raise
            data = self._obj.unused_data
            if not data:
                return bytes(out)
            # Leftover input means another gzip member follows: decode it
            # with a fresh decompressor.
            self._state = GzipDecoderState.OTHER_MEMBERS
            self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)

    def flush(self) -> bytes:
        return self._obj.flush()
|
jpayne@7
|
145
|
jpayne@7
|
146
|
jpayne@7
|
if brotli is not None:

    class BrotliDecoder(ContentDecoder):
        # Supports both 'brotlipy' and 'Brotli' packages
        # since they share an import name. The top branches
        # are for 'brotlipy' and bottom branches for 'Brotli'
        def __init__(self) -> None:
            self._obj = brotli.Decompressor()
            # Bind this instance's 'decompress' straight to the underlying
            # decompressor's method: 'brotlipy' names it decompress(),
            # 'Brotli' names it process().
            if hasattr(self._obj, "decompress"):
                setattr(self, "decompress", self._obj.decompress)
            else:
                setattr(self, "decompress", self._obj.process)

        def flush(self) -> bytes:
            # 'brotlipy' exposes flush(); 'Brotli' has nothing to flush.
            if hasattr(self._obj, "flush"):
                return self._obj.flush()  # type: ignore[no-any-return]
            return b""
|
jpayne@7
|
164
|
jpayne@7
|
165
|
jpayne@7
|
if zstd is not None:

    class ZstdDecoder(ContentDecoder):
        """Decoder for ``Content-Encoding: zstd``.

        Handles concatenated zstd frames by starting a fresh
        decompressor whenever one frame ends with input left over.
        """

        def __init__(self) -> None:
            self._obj = zstd.ZstdDecompressor().decompressobj()

        def decompress(self, data: bytes) -> bytes:
            if not data:
                return b""
            parts = [self._obj.decompress(data)]
            # A finished frame with unused input means another frame
            # follows; feed the remainder to a brand-new decompressor.
            while self._obj.eof and self._obj.unused_data:
                leftover = self._obj.unused_data
                self._obj = zstd.ZstdDecompressor().decompressobj()
                parts.append(self._obj.decompress(leftover))
            return b"".join(parts)

        def flush(self) -> bytes:
            ret = self._obj.flush()  # note: this is a no-op
            if not self._obj.eof:
                # Stream ended mid-frame: the response body was truncated.
                raise DecodeError("Zstandard data is incomplete")
            return ret  # type: ignore[no-any-return]
|
jpayne@7
|
187
|
jpayne@7
|
188
|
jpayne@7
|
class MultiDecoder(ContentDecoder):
    """
    From RFC7231:
        If one or more encodings have been applied to a representation, the
        sender that applied the encodings MUST generate a Content-Encoding
        header field that lists the content codings in the order in which
        they were applied.
    """

    def __init__(self, modes: str) -> None:
        # One decoder per listed coding, kept in header (application) order.
        self._decoders = [_get_decoder(mode.strip()) for mode in modes.split(",")]

    def flush(self) -> bytes:
        # Only the first-applied (i.e. last-run) decoder can hold buffered
        # output worth flushing.
        return self._decoders[0].flush()

    def decompress(self, data: bytes) -> bytes:
        # Undo the codings in reverse of the order they were applied.
        for decoder in reversed(self._decoders):
            data = decoder.decompress(data)
        return data
|
jpayne@7
|
208
|
jpayne@7
|
209
|
jpayne@7
|
def _get_decoder(mode: str) -> ContentDecoder:
    """Return a ContentDecoder matching the given content-coding token(s)."""
    # A comma means several codings were applied in sequence.
    if "," in mode:
        return MultiDecoder(mode)

    # According to RFC 9110 section 8.4.1.3, recipients should
    # consider x-gzip equivalent to gzip
    if mode in ("gzip", "x-gzip"):
        return GzipDecoder()

    if mode == "br" and brotli is not None:
        return BrotliDecoder()

    if mode == "zstd" and zstd is not None:
        return ZstdDecoder()

    # Anything else is treated as deflate.
    return DeflateDecoder()
|
jpayne@7
|
226
|
jpayne@7
|
227
|
jpayne@7
|
class BytesQueueBuffer:
    """Memory-efficient bytes buffer

    To return decoded data in read() and still follow the BufferedIOBase API, we need a
    buffer to always return the correct amount of bytes.

    This buffer should be filled using calls to put()

    Our maximum memory usage is determined by the sum of the size of:

     * self.buffer, which contains the full data
     * the largest chunk that we will copy in get()

    The worst case scenario is a single chunk, in which case we'll make a full copy of
    the data inside get().
    """

    def __init__(self) -> None:
        self.buffer: typing.Deque[bytes] = collections.deque()
        self._size: int = 0  # total bytes currently queued

    def __len__(self) -> int:
        return self._size

    def put(self, data: bytes) -> None:
        """Append one chunk to the end of the buffer."""
        self.buffer.append(data)
        self._size += len(data)

    def get(self, n: int) -> bytes:
        """Pop and return up to *n* bytes from the front of the buffer."""
        if n == 0:
            return b""
        elif not self.buffer:
            raise RuntimeError("buffer is empty")
        elif n < 0:
            raise ValueError("n should be > 0")

        out = io.BytesIO()
        copied = 0
        while copied < n:
            chunk = self.buffer.popleft()
            needed = n - copied
            if len(chunk) > needed:
                # Split the chunk: keep the tail queued for the next call.
                out.write(chunk[:needed])
                self.buffer.appendleft(chunk[needed:])
                self._size -= needed
                break
            out.write(chunk)
            copied += len(chunk)
            self._size -= len(chunk)

            if not self.buffer:
                break

        return out.getvalue()

    def get_all(self) -> bytes:
        """Pop and return the entire buffered contents in one bytes object."""
        chunks = self.buffer
        if not chunks:
            assert self._size == 0
            return b""
        if len(chunks) == 1:
            # Single chunk: hand it back without copying.
            result = chunks.pop()
        else:
            out = io.BytesIO()
            out.writelines(chunks.popleft() for _ in range(len(chunks)))
            result = out.getvalue()
        self._size = 0
        return result
|
jpayne@7
|
299
|
jpayne@7
|
300
|
jpayne@7
|
class BaseHTTPResponse(io.IOBase):
    """Abstract base for urllib3 HTTP responses.

    Holds header/status bookkeeping, the content-decoding plumbing and the
    ``http.client``/``io`` compatibility shims shared by concrete
    subclasses, which must implement the read/stream/close methods.
    """

    # Content-codings this class can decode; 'br' and 'zstd' are only
    # available when the matching optional package imported at module scope.
    CONTENT_DECODERS = ["gzip", "x-gzip", "deflate"]
    if brotli is not None:
        CONTENT_DECODERS += ["br"]
    if zstd is not None:
        CONTENT_DECODERS += ["zstd"]
    REDIRECT_STATUSES = [301, 302, 303, 307, 308]

    # Exception types a ContentDecoder may raise; _decode() re-raises
    # them as urllib3's DecodeError.
    DECODER_ERROR_CLASSES: tuple[type[Exception], ...] = (IOError, zlib.error)
    if brotli is not None:
        DECODER_ERROR_CLASSES += (brotli.error,)

    if zstd is not None:
        DECODER_ERROR_CLASSES += (zstd.ZstdError,)

    def __init__(
        self,
        *,
        headers: typing.Mapping[str, str] | typing.Mapping[bytes, bytes] | None = None,
        status: int,
        version: int,
        reason: str | None,
        decode_content: bool,
        request_url: str | None,
        retries: Retry | None = None,
    ) -> None:
        if isinstance(headers, HTTPHeaderDict):
            self.headers = headers
        else:
            self.headers = HTTPHeaderDict(headers)  # type: ignore[arg-type]
        self.status = status
        self.version = version
        self.reason = reason
        self.decode_content = decode_content
        # Set True once any data has been returned decoded; used to forbid
        # mixing decoded and raw reads on the same response.
        self._has_decoded_content = False
        self._request_url: str | None = request_url
        self.retries = retries

        self.chunked = False
        tr_enc = self.headers.get("transfer-encoding", "").lower()
        # Don't incur the penalty of creating a list and then discarding it
        encodings = (enc.strip() for enc in tr_enc.split(","))
        if "chunked" in encodings:
            self.chunked = True

        self._decoder: ContentDecoder | None = None
        self.length_remaining: int | None

    def get_redirect_location(self) -> str | None | Literal[False]:
        """
        Should we redirect and where to?

        :returns: Truthy redirect location string if we got a redirect status
            code and valid location. ``None`` if redirect status and no
            location. ``False`` if not a redirect status code.
        """
        if self.status in self.REDIRECT_STATUSES:
            return self.headers.get("location")
        return False

    @property
    def data(self) -> bytes:
        # Subclasses return the full (possibly decoded) body here.
        raise NotImplementedError()

    def json(self) -> typing.Any:
        """
        Parses the body of the HTTP response as JSON.

        To use a custom JSON decoder pass the result of :attr:`HTTPResponse.data` to the decoder.

        This method can raise either `UnicodeDecodeError` or `json.JSONDecodeError`.

        Read more :ref:`here <json>`.
        """
        data = self.data.decode("utf-8")
        return _json.loads(data)

    @property
    def url(self) -> str | None:
        raise NotImplementedError()

    @url.setter
    def url(self, url: str | None) -> None:
        raise NotImplementedError()

    @property
    def connection(self) -> BaseHTTPConnection | None:
        raise NotImplementedError()

    @property
    def retries(self) -> Retry | None:
        return self._retries

    @retries.setter
    def retries(self, retries: Retry | None) -> None:
        # Override the request_url if retries has a redirect location.
        if retries is not None and retries.history:
            self.url = retries.history[-1].redirect_location
        self._retries = retries

    def stream(
        self, amt: int | None = 2**16, decode_content: bool | None = None
    ) -> typing.Iterator[bytes]:
        raise NotImplementedError()

    def read(
        self,
        amt: int | None = None,
        decode_content: bool | None = None,
        cache_content: bool = False,
    ) -> bytes:
        raise NotImplementedError()

    def read1(
        self,
        amt: int | None = None,
        decode_content: bool | None = None,
    ) -> bytes:
        raise NotImplementedError()

    def read_chunked(
        self,
        amt: int | None = None,
        decode_content: bool | None = None,
    ) -> typing.Iterator[bytes]:
        raise NotImplementedError()

    def release_conn(self) -> None:
        raise NotImplementedError()

    def drain_conn(self) -> None:
        raise NotImplementedError()

    def close(self) -> None:
        raise NotImplementedError()

    def _init_decoder(self) -> None:
        """
        Set-up the _decoder attribute if necessary.
        """
        # Note: content-encoding value should be case-insensitive, per RFC 7230
        # Section 3.2
        content_encoding = self.headers.get("content-encoding", "").lower()
        if self._decoder is None:
            if content_encoding in self.CONTENT_DECODERS:
                self._decoder = _get_decoder(content_encoding)
            elif "," in content_encoding:
                # Multiple codings: only build a MultiDecoder when every
                # relevant token is one we know how to decode.
                encodings = [
                    e.strip()
                    for e in content_encoding.split(",")
                    if e.strip() in self.CONTENT_DECODERS
                ]
                if encodings:
                    self._decoder = _get_decoder(content_encoding)

    def _decode(
        self, data: bytes, decode_content: bool | None, flush_decoder: bool
    ) -> bytes:
        """
        Decode the data passed in and potentially flush the decoder.
        """
        if not decode_content:
            # Raw reads are not allowed once decoded data has been handed out,
            # because the two byte streams would no longer line up.
            if self._has_decoded_content:
                raise RuntimeError(
                    "Calling read(decode_content=False) is not supported after "
                    "read(decode_content=True) was called."
                )
            return data

        try:
            if self._decoder:
                data = self._decoder.decompress(data)
                self._has_decoded_content = True
        except self.DECODER_ERROR_CLASSES as e:
            content_encoding = self.headers.get("content-encoding", "").lower()
            raise DecodeError(
                "Received response with content-encoding: %s, but "
                "failed to decode it." % content_encoding,
                e,
            ) from e
        if flush_decoder:
            data += self._flush_decoder()

        return data

    def _flush_decoder(self) -> bytes:
        """
        Flushes the decoder. Should only be called if the decoder is actually
        being used.
        """
        if self._decoder:
            return self._decoder.decompress(b"") + self._decoder.flush()
        return b""

    # Compatibility methods for `io` module
    def readinto(self, b: bytearray) -> int:
        # Fill *b* with up to len(b) bytes; return the count read (0 == EOF).
        temp = self.read(len(b))
        if len(temp) == 0:
            return 0
        else:
            b[: len(temp)] = temp
            return len(temp)

    # Compatibility methods for http.client.HTTPResponse
    def getheaders(self) -> HTTPHeaderDict:
        warnings.warn(
            "HTTPResponse.getheaders() is deprecated and will be removed "
            "in urllib3 v2.1.0. Instead access HTTPResponse.headers directly.",
            category=DeprecationWarning,
            stacklevel=2,
        )
        return self.headers

    def getheader(self, name: str, default: str | None = None) -> str | None:
        warnings.warn(
            "HTTPResponse.getheader() is deprecated and will be removed "
            "in urllib3 v2.1.0. Instead use HTTPResponse.headers.get(name, default).",
            category=DeprecationWarning,
            stacklevel=2,
        )
        return self.headers.get(name, default)

    # Compatibility method for http.cookiejar
    def info(self) -> HTTPHeaderDict:
        return self.headers

    def geturl(self) -> str | None:
        return self.url
|
jpayne@7
|
529
|
jpayne@7
|
530
|
jpayne@7
|
531 class HTTPResponse(BaseHTTPResponse):
|
jpayne@7
|
532 """
|
jpayne@7
|
533 HTTP Response container.
|
jpayne@7
|
534
|
jpayne@7
|
535 Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is
|
jpayne@7
|
536 loaded and decoded on-demand when the ``data`` property is accessed. This
|
jpayne@7
|
537 class is also compatible with the Python standard library's :mod:`io`
|
jpayne@7
|
538 module, and can hence be treated as a readable object in the context of that
|
jpayne@7
|
539 framework.
|
jpayne@7
|
540
|
jpayne@7
|
541 Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`:
|
jpayne@7
|
542
|
jpayne@7
|
543 :param preload_content:
|
jpayne@7
|
544 If True, the response's body will be preloaded during construction.
|
jpayne@7
|
545
|
jpayne@7
|
546 :param decode_content:
|
jpayne@7
|
547 If True, will attempt to decode the body based on the
|
jpayne@7
|
548 'content-encoding' header.
|
jpayne@7
|
549
|
jpayne@7
|
550 :param original_response:
|
jpayne@7
|
551 When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse`
|
jpayne@7
|
552 object, it's convenient to include the original for debug purposes. It's
|
jpayne@7
|
553 otherwise unused.
|
jpayne@7
|
554
|
jpayne@7
|
555 :param retries:
|
jpayne@7
|
556 The retries contains the last :class:`~urllib3.util.retry.Retry` that
|
jpayne@7
|
557 was used during the request.
|
jpayne@7
|
558
|
jpayne@7
|
559 :param enforce_content_length:
|
jpayne@7
|
560 Enforce content length checking. Body returned by server must match
|
jpayne@7
|
561 value of Content-Length header, if present. Otherwise, raise error.
|
jpayne@7
|
562 """
|
jpayne@7
|
563
|
jpayne@7
|
    def __init__(
        self,
        body: _TYPE_BODY = "",
        headers: typing.Mapping[str, str] | typing.Mapping[bytes, bytes] | None = None,
        status: int = 0,
        version: int = 0,
        reason: str | None = None,
        preload_content: bool = True,
        decode_content: bool = True,
        original_response: _HttplibHTTPResponse | None = None,
        pool: HTTPConnectionPool | None = None,
        connection: HTTPConnection | None = None,
        msg: _HttplibHTTPMessage | None = None,
        retries: Retry | None = None,
        enforce_content_length: bool = True,
        request_method: str | None = None,
        request_url: str | None = None,
        auto_close: bool = True,
    ) -> None:
        super().__init__(
            headers=headers,
            status=status,
            version=version,
            reason=reason,
            decode_content=decode_content,
            request_url=request_url,
            retries=retries,
        )

        self.enforce_content_length = enforce_content_length
        self.auto_close = auto_close

        self._body = None
        self._fp: _HttplibHTTPResponse | None = None
        self._original_response = original_response
        self._fp_bytes_read = 0  # raw bytes pulled off the wire so far
        self.msg = msg

        # A str/bytes body is stored directly; file-like bodies become _fp.
        if body and isinstance(body, (str, bytes)):
            self._body = body

        self._pool = pool
        self._connection = connection

        if hasattr(body, "read"):
            self._fp = body  # type: ignore[assignment]

        # Are we using the chunked-style of transfer encoding?
        self.chunk_left: int | None = None

        # Determine length of response
        self.length_remaining = self._init_length(request_method)

        # Used to return the correct amount of bytes for partial read()s
        self._decoded_buffer = BytesQueueBuffer()

        # If requested, preload the body.
        if preload_content and not self._body:
            self._body = self.read(decode_content=decode_content)
|
jpayne@7
|
623
|
jpayne@7
|
624 def release_conn(self) -> None:
|
jpayne@7
|
625 if not self._pool or not self._connection:
|
jpayne@7
|
626 return None
|
jpayne@7
|
627
|
jpayne@7
|
628 self._pool._put_conn(self._connection)
|
jpayne@7
|
629 self._connection = None
|
jpayne@7
|
630
|
jpayne@7
|
    def drain_conn(self) -> None:
        """
        Read and discard any remaining HTTP response data in the response connection.

        Unread data in the HTTPResponse connection blocks the connection from being released back to the pool.
        """
        try:
            self.read()
        except (HTTPError, OSError, BaseSSLError, HTTPException):
            # Best effort: failures while draining are deliberately ignored.
            pass
|
jpayne@7
|
641
|
jpayne@7
|
    @property
    def data(self) -> bytes:
        # For backwards-compat with earlier urllib3 0.4 and earlier.
        if self._body:
            return self._body  # type: ignore[return-value]

        if self._fp:
            # Read the whole body now; cache_content=True stores it in
            # self._body so later accesses return the same bytes.
            return self.read(cache_content=True)

        return None  # type: ignore[return-value]
|
jpayne@7
|
652
|
jpayne@7
|
    @property
    def connection(self) -> HTTPConnection | None:
        # The connection this response was read from (None once released).
        return self._connection
|
jpayne@7
|
656
|
jpayne@7
|
    def isclosed(self) -> bool:
        # Delegates to util.response.is_fp_closed on the underlying file-like.
        return is_fp_closed(self._fp)
|
jpayne@7
|
659
|
jpayne@7
|
    def tell(self) -> int:
        """
        Obtain the number of bytes pulled over the wire so far. May differ from
        the amount of content returned by :meth:``urllib3.response.HTTPResponse.read``
        if bytes are encoded on the wire (e.g, compressed).
        """
        return self._fp_bytes_read
|
jpayne@7
|
667
|
jpayne@7
|
    def _init_length(self, request_method: str | None) -> int | None:
        """
        Set initial length value for Response content if available.

        Returns the expected body length in bytes, or ``None`` when it
        cannot be determined (chunked transfer, missing/invalid header).
        """
        length: int | None
        content_length: str | None = self.headers.get("content-length")

        if content_length is not None:
            if self.chunked:
                # This Response will fail with an IncompleteRead if it can't be
                # received as chunked. This method falls back to attempt reading
                # the response before raising an exception.
                log.warning(
                    "Received response with both Content-Length and "
                    "Transfer-Encoding set. This is expressly forbidden "
                    "by RFC 7230 sec 3.3.2. Ignoring Content-Length and "
                    "attempting to process response as Transfer-Encoding: "
                    "chunked."
                )
                return None

            try:
                # RFC 7230 section 3.3.2 specifies multiple content lengths can
                # be sent in a single Content-Length header
                # (e.g. Content-Length: 42, 42). This line ensures the values
                # are all valid ints and that as long as the `set` length is 1,
                # all values are the same. Otherwise, the header is invalid.
                lengths = {int(val) for val in content_length.split(",")}
                if len(lengths) > 1:
                    raise InvalidHeader(
                        "Content-Length contained multiple "
                        "unmatching values (%s)" % content_length
                    )
                length = lengths.pop()
            except ValueError:
                # Non-integer Content-Length: treat the length as unknown.
                length = None
            else:
                # Negative lengths are invalid; treat as unknown.
                if length < 0:
                    length = None

        else:  # if content_length is None
            length = None

        # Convert status to int for comparison
        # In some cases, httplib returns a status of "_UNKNOWN"
        try:
            status = int(self.status)
        except ValueError:
            status = 0

        # Check for responses that shouldn't include a body
        if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD":
            length = 0

        return length
|
jpayne@7
|
723
|
jpayne@7
|
    @contextmanager
    def _error_catcher(self) -> typing.Generator[None, None, None]:
        """
        Catch low-level python exceptions, instead re-raising urllib3
        variants, so that low-level exceptions are not leaked in the
        high-level api.

        On exit, release the connection back to the pool.
        """
        clean_exit = False

        try:
            try:
                yield

            except SocketTimeout as e:
                # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
                # there is yet no clean way to get at it from this context.
                raise ReadTimeoutError(self._pool, None, "Read timed out.") from e  # type: ignore[arg-type]

            except BaseSSLError as e:
                # FIXME: Is there a better way to differentiate between SSLErrors?
                if "read operation timed out" not in str(e):
                    # SSL errors related to framing/MAC get wrapped and reraised here
                    raise SSLError(e) from e

                # An SSL read timeout is reported the same way as a socket one.
                raise ReadTimeoutError(self._pool, None, "Read timed out.") from e  # type: ignore[arg-type]

            except IncompleteRead as e:
                # expected == -partial marks the "no content expected" case.
                if (
                    e.expected is not None
                    and e.partial is not None
                    and e.expected == -e.partial
                ):
                    arg = "Response may not contain content."
                else:
                    arg = f"Connection broken: {e!r}"
                raise ProtocolError(arg, e) from e

            except (HTTPException, OSError) as e:
                raise ProtocolError(f"Connection broken: {e!r}", e) from e

            # If no exception is thrown, we should avoid cleaning up
            # unnecessarily.
            clean_exit = True
        finally:
            # If we didn't terminate cleanly, we need to throw away our
            # connection.
            if not clean_exit:
                # The response may not be closed but we're not going to use it
                # anymore so close it now to ensure that the connection is
                # released back to the pool.
                if self._original_response:
                    self._original_response.close()

                # Closing the response may not actually be sufficient to close
                # everything, so if we have a hold of the connection close that
                # too.
                if self._connection:
                    self._connection.close()

            # If we hold the original response but it's closed now, we should
            # return the connection back to the pool.
            if self._original_response and self._original_response.isclosed():
                self.release_conn()
|
jpayne@7
|
789
|
jpayne@7
|
790 def _fp_read(
|
jpayne@7
|
791 self,
|
jpayne@7
|
792 amt: int | None = None,
|
jpayne@7
|
793 *,
|
jpayne@7
|
794 read1: bool = False,
|
jpayne@7
|
795 ) -> bytes:
|
jpayne@7
|
796 """
|
jpayne@7
|
|