comparison CSP2/CSP2_env/env-d9b9114564458d9d-741b3de822f2aaca6c6caa4325c4afce/lib/python3.8/http/client.py @ 69:33d812a61356

planemo upload commit 2e9511a184a1ca667c7be0c6321a36dc4e3d116d
author jpayne
date Tue, 18 Mar 2025 17:55:14 -0400
1 r"""HTTP/1.1 client library
2
3 <intro stuff goes here>
4 <other stuff, too>
5
6 HTTPConnection goes through a number of "states", which define when a client
7 may legally make another request or fetch the response for a particular
8 request. This diagram details these state transitions:
9
10     (null)
11       |
12       | HTTPConnection()
13       v
14     Idle
15       |
16       | putrequest()
17       v
18     Request-started
19       |
20       | ( putheader() )*  endheaders()
21       v
22     Request-sent
23       |\_____________________________
24       |                              | getresponse() raises
25       | response = getresponse()     | ConnectionError
26       v                              v
27     Unread-response                Idle
28     [Response-headers-read]
29       |\____________________
30       |                     |
31       | response.read()     | putrequest()
32       v                     v
33     Idle                  Req-started-unread-response
34                      ______/|
35                    /        |
36    response.read() |        | ( putheader() )*  endheaders()
37                    v        v
38     Request-started    Req-sent-unread-response
39                                 |
40                                 | response.read()
41                                 v
42                               Request-sent
43
44 This diagram presents the following rules:
45 -- a second request may not be started until {response-headers-read}
46 -- a response [object] cannot be retrieved until {request-sent}
47 -- there is no differentiation between an unread response body and a
48 partially read response body
49
50 Note: this enforcement is applied by the HTTPConnection class. The
51 HTTPResponse class does not enforce this state machine, which
52 implies sophisticated clients may accelerate the request/response
53 pipeline. Caution should be taken, though: accelerating the states
54 beyond the above pattern may imply knowledge of the server's
55 connection-close behavior for certain requests. For example, it
56 is impossible to tell whether the server will close the connection
57 UNTIL the response headers have been read; this means that further
58 requests cannot be placed into the pipeline until it is known that
59 the server will NOT be closing the connection.
60
61 Logical State                  __state            __response
62 -------------                  -------            ----------
63 Idle                           _CS_IDLE           None
64 Request-started                _CS_REQ_STARTED    None
65 Request-sent                   _CS_REQ_SENT       None
66 Unread-response                _CS_IDLE           <response_class>
67 Req-started-unread-response    _CS_REQ_STARTED    <response_class>
68 Req-sent-unread-response       _CS_REQ_SENT       <response_class>
69 """
70
71 import email.parser
72 import email.message
73 import http
74 import io
75 import re
76 import socket
77 import collections.abc
78 from urllib.parse import urlsplit
79
80 # HTTPMessage, parse_headers(), and the HTTP status code constants are
81 # intentionally omitted for simplicity
82 __all__ = ["HTTPResponse", "HTTPConnection",
83 "HTTPException", "NotConnected", "UnknownProtocol",
84 "UnknownTransferEncoding", "UnimplementedFileMode",
85 "IncompleteRead", "InvalidURL", "ImproperConnectionState",
86 "CannotSendRequest", "CannotSendHeader", "ResponseNotReady",
87 "BadStatusLine", "LineTooLong", "RemoteDisconnected", "error",
88 "responses"]
89
90 HTTP_PORT = 80
91 HTTPS_PORT = 443
92
93 _UNKNOWN = 'UNKNOWN'
94
95 # connection states
96 _CS_IDLE = 'Idle'
97 _CS_REQ_STARTED = 'Request-started'
98 _CS_REQ_SENT = 'Request-sent'
99
100
101 # hack to maintain backwards compatibility
102 globals().update(http.HTTPStatus.__members__)
103
104 # another hack to maintain backwards compatibility
105 # Mapping status codes to their official reason phrases
106 responses = {v: v.phrase for v in http.HTTPStatus.__members__.values()}
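# For example, responses[NOT_FOUND] == 'Not Found' (the status constants were
# injected into the module namespace by the globals().update() hack above).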
107
108 # maximal line length when calling readline().
109 _MAXLINE = 65536
110 _MAXHEADERS = 100
111
112 # Header name/value ABNF (http://tools.ietf.org/html/rfc7230#section-3.2)
113 #
114 # VCHAR = %x21-7E
115 # obs-text = %x80-FF
116 # header-field = field-name ":" OWS field-value OWS
117 # field-name = token
118 # field-value = *( field-content / obs-fold )
119 # field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ]
120 # field-vchar = VCHAR / obs-text
121 #
122 # obs-fold = CRLF 1*( SP / HTAB )
123 # ; obsolete line folding
124 # ; see Section 3.2.4
125
126 # token = 1*tchar
127 #
128 # tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*"
129 # / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~"
130 # / DIGIT / ALPHA
131 # ; any VCHAR, except delimiters
132 #
133 # VCHAR defined in http://tools.ietf.org/html/rfc5234#appendix-B.1
134
135 # the patterns for both name and value are more lenient than RFC
136 # definitions to allow for backwards compatibility
137 _is_legal_header_name = re.compile(rb'[^:\s][^:\r\n]*').fullmatch
138 _is_illegal_header_value = re.compile(rb'\n(?![ \t])|\r(?![ \t\n])').search
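# Illustrative checks against these patterns (example values, not from the
# original source): _is_legal_header_name(b"Content-Type") matches, while
# b"Bad:Name" (embedded colon) and b" LeadingSpace" do not;
# _is_illegal_header_value(b"x\r\ninjected") flags the bare CRLF, whereas an
# obs-fold continuation such as b"x\r\n more" is allowed.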
139
140 # These characters are not allowed within HTTP URL paths.
141 # See https://tools.ietf.org/html/rfc3986#section-3.3 and the
142 # https://tools.ietf.org/html/rfc3986#appendix-A pchar definition.
143 # Prevents CVE-2019-9740. Includes control characters such as \r\n.
144 # We don't restrict chars above \x7f as putrequest() limits us to ASCII.
145 _contains_disallowed_url_pchar_re = re.compile('[\x00-\x20\x7f]')
146 # Arguably only these _should_ be allowed:
147 # _is_allowed_url_pchars_re = re.compile(r"^[/!$&'()*+,;=:@%a-zA-Z0-9._~-]+$")
148 # We are more lenient for assumed real world compatibility purposes.
149
150 # We always set the Content-Length header for these methods because some
151 # servers will otherwise respond with a 411
152 _METHODS_EXPECTING_BODY = {'PATCH', 'POST', 'PUT'}
153
154
155 def _encode(data, name='data'):
156 """Call data.encode("latin-1") but show a better error message."""
157 try:
158 return data.encode("latin-1")
159 except UnicodeEncodeError as err:
160 raise UnicodeEncodeError(
161 err.encoding,
162 err.object,
163 err.start,
164 err.end,
165 "%s (%.20r) is not valid Latin-1. Use %s.encode('utf-8') "
166 "if you want to send it encoded in UTF-8." %
167 (name.title(), data[err.start:err.end], name)) from None
168
169
170 class HTTPMessage(email.message.Message):
171 # XXX The only usage of this method is in
172 # http.server.CGIHTTPRequestHandler. Maybe move the code there so
173 # that it doesn't need to be part of the public API. The API has
174 # never been defined so this could cause backwards compatibility
175 # issues.
176
177 def getallmatchingheaders(self, name):
178 """Find all header lines matching a given header name.
179
180 Look through the list of headers and find all lines matching a given
181 header name (and their continuation lines). A list of the lines is
182 returned, without interpretation. If the header does not occur, an
183 empty list is returned. If the header occurs multiple times, all
184 occurrences are returned. Case is not important in the header name.
185
186 """
187 name = name.lower() + ':'
188 n = len(name)
189 lst = []
190 hit = 0
191 for line in self.keys():
192 if line[:n].lower() == name:
193 hit = 1
194 elif not line[:1].isspace():
195 hit = 0
196 if hit:
197 lst.append(line)
198 return lst
199
200 def parse_headers(fp, _class=HTTPMessage):
201 """Parses only RFC2822 headers from a file pointer.
202
203 email Parser wants to see strings rather than bytes.
204 But a TextIOWrapper around the file object would buffer too many bytes
205 from the stream, bytes which we later need to read as bytes.
206 So we read the correct bytes here, as bytes, for email Parser
207 to parse.
208
209 """
210 headers = []
211 while True:
212 line = fp.readline(_MAXLINE + 1)
213 if len(line) > _MAXLINE:
214 raise LineTooLong("header line")
215 headers.append(line)
216 if len(headers) > _MAXHEADERS:
217 raise HTTPException("got more than %d headers" % _MAXHEADERS)
218 if line in (b'\r\n', b'\n', b''):
219 break
220 hstring = b''.join(headers).decode('iso-8859-1')
221 return email.parser.Parser(_class=_class).parsestr(hstring)
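# A short sketch of parse_headers() on an in-memory stream (assumed example
# data, not part of the original module):
#
#     import io
#     fp = io.BytesIO(b"Host: example.org\r\nAccept: */*\r\n\r\n")
#     msg = parse_headers(fp)
#     msg["Host"]    # -> 'example.org'
#     msg.items()    # -> [('Host', 'example.org'), ('Accept', '*/*')]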
222
223
224 class HTTPResponse(io.BufferedIOBase):
225
226 # See RFC 2616 sec 19.6 and RFC 1945 sec 6 for details.
227
228 # The bytes from the socket object are iso-8859-1 strings.
229 # See RFC 2616 sec 2.2 which notes an exception for MIME-encoded
230 # text following RFC 2047. The basic status line parsing only
231 # accepts iso-8859-1.
232
233 def __init__(self, sock, debuglevel=0, method=None, url=None):
234 # If the response includes a content-length header, we need to
235 # make sure that the client doesn't read more than the
236 # specified number of bytes. If it does, it will block until
237 # the server times out and closes the connection. This will
238 # happen if a self.fp.read() is done (without a size) whether
239 # self.fp is buffered or not. So, no self.fp.read() by
240 # clients unless they know what they are doing.
241 self.fp = sock.makefile("rb")
242 self.debuglevel = debuglevel
243 self._method = method
244
245 # The HTTPResponse object is returned via urllib. The clients
246 # of http and urllib expect different attributes for the
247 # headers. headers is used here and supports urllib. msg is
248 # provided as a backwards compatibility layer for http
249 # clients.
250
251 self.headers = self.msg = None
252
253 # from the Status-Line of the response
254 self.version = _UNKNOWN # HTTP-Version
255 self.status = _UNKNOWN # Status-Code
256 self.reason = _UNKNOWN # Reason-Phrase
257
258 self.chunked = _UNKNOWN # is "chunked" being used?
259 self.chunk_left = _UNKNOWN # bytes left to read in current chunk
260 self.length = _UNKNOWN # number of bytes left in response
261 self.will_close = _UNKNOWN # conn will close at end of response
262
263 def _read_status(self):
264 line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
265 if len(line) > _MAXLINE:
266 raise LineTooLong("status line")
267 if self.debuglevel > 0:
268 print("reply:", repr(line))
269 if not line:
270 # Presumably, the server closed the connection before
271 # sending a valid response.
272 raise RemoteDisconnected("Remote end closed connection without"
273 " response")
274 try:
275 version, status, reason = line.split(None, 2)
276 except ValueError:
277 try:
278 version, status = line.split(None, 1)
279 reason = ""
280 except ValueError:
281 # empty version will cause next test to fail.
282 version = ""
283 if not version.startswith("HTTP/"):
284 self._close_conn()
285 raise BadStatusLine(line)
286
287 # The status code is a three-digit number
288 try:
289 status = int(status)
290 if status < 100 or status > 999:
291 raise BadStatusLine(line)
292 except ValueError:
293 raise BadStatusLine(line)
294 return version, status, reason
295
296 def begin(self):
297 if self.headers is not None:
298 # we've already started reading the response
299 return
300
301 # read until we get a non-100 response
302 while True:
303 version, status, reason = self._read_status()
304 if status != CONTINUE:
305 break
306 # skip the header from the 100 response
307 while True:
308 skip = self.fp.readline(_MAXLINE + 1)
309 if len(skip) > _MAXLINE:
310 raise LineTooLong("header line")
311 skip = skip.strip()
312 if not skip:
313 break
314 if self.debuglevel > 0:
315 print("header:", skip)
316
317 self.code = self.status = status
318 self.reason = reason.strip()
319 if version in ("HTTP/1.0", "HTTP/0.9"):
320 # Some servers might still return "0.9", treat it as 1.0 anyway
321 self.version = 10
322 elif version.startswith("HTTP/1."):
323 self.version = 11 # use HTTP/1.1 code for HTTP/1.x where x>=1
324 else:
325 raise UnknownProtocol(version)
326
327 self.headers = self.msg = parse_headers(self.fp)
328
329 if self.debuglevel > 0:
330 for hdr, val in self.headers.items():
331 print("header:", hdr + ":", val)
332
333 # are we using the chunked-style of transfer encoding?
334 tr_enc = self.headers.get("transfer-encoding")
335 if tr_enc and tr_enc.lower() == "chunked":
336 self.chunked = True
337 self.chunk_left = None
338 else:
339 self.chunked = False
340
341 # will the connection close at the end of the response?
342 self.will_close = self._check_close()
343
344 # do we have a Content-Length?
345 # NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked"
346 self.length = None
347 length = self.headers.get("content-length")
348
349 # are we using the chunked-style of transfer encoding?
350 tr_enc = self.headers.get("transfer-encoding")
351 if length and not self.chunked:
352 try:
353 self.length = int(length)
354 except ValueError:
355 self.length = None
356 else:
357 if self.length < 0: # ignore nonsensical negative lengths
358 self.length = None
359 else:
360 self.length = None
361
362 # does the body have a fixed length? (of zero)
363 if (status == NO_CONTENT or status == NOT_MODIFIED or
364 100 <= status < 200 or # 1xx codes
365 self._method == "HEAD"):
366 self.length = 0
367
368 # if the connection remains open, and we aren't using chunked, and
369 # a content-length was not provided, then assume that the connection
370 # WILL close.
371 if (not self.will_close and
372 not self.chunked and
373 self.length is None):
374 self.will_close = True
375
376 def _check_close(self):
377 conn = self.headers.get("connection")
378 if self.version == 11:
379 # An HTTP/1.1 proxy is assumed to stay open unless
380 # explicitly closed.
381 if conn and "close" in conn.lower():
382 return True
383 return False
384
385 # Some HTTP/1.0 implementations have support for persistent
386 # connections, using rules different than HTTP/1.1.
387
388 # For older HTTP, Keep-Alive indicates persistent connection.
389 if self.headers.get("keep-alive"):
390 return False
391
392 # At least Akamai returns a "Connection: Keep-Alive" header,
393 # which was supposed to be sent by the client.
394 if conn and "keep-alive" in conn.lower():
395 return False
396
397 # Proxy-Connection is a netscape hack.
398 pconn = self.headers.get("proxy-connection")
399 if pconn and "keep-alive" in pconn.lower():
400 return False
401
402 # otherwise, assume it will close
403 return True
404
405 def _close_conn(self):
406 fp = self.fp
407 self.fp = None
408 fp.close()
409
410 def close(self):
411 try:
412 super().close() # set "closed" flag
413 finally:
414 if self.fp:
415 self._close_conn()
416
417 # These implementations are for the benefit of io.BufferedReader.
418
419 # XXX This class should probably be revised to act more like
420 # the "raw stream" that BufferedReader expects.
421
422 def flush(self):
423 super().flush()
424 if self.fp:
425 self.fp.flush()
426
427 def readable(self):
428 """Always returns True"""
429 return True
430
431 # End of "raw stream" methods
432
433 def isclosed(self):
434 """True if the connection is closed."""
435 # NOTE: it is possible that we will not ever call self.close(). This
436 # case occurs when will_close is TRUE, length is None, and we
437 # read up to the last byte, but NOT past it.
438 #
439 # IMPLIES: if will_close is FALSE, then self.close() will ALWAYS be
440 # called, meaning self.isclosed() is meaningful.
441 return self.fp is None
442
443 def read(self, amt=None):
444 if self.fp is None:
445 return b""
446
447 if self._method == "HEAD":
448 self._close_conn()
449 return b""
450
451 if amt is not None:
452 # Amount is given, implement using readinto
453 b = bytearray(amt)
454 n = self.readinto(b)
455 return memoryview(b)[:n].tobytes()
456 else:
457 # Amount is not given (unbounded read) so we must check self.length
458 # and self.chunked
459
460 if self.chunked:
461 return self._readall_chunked()
462
463 if self.length is None:
464 s = self.fp.read()
465 else:
466 try:
467 s = self._safe_read(self.length)
468 except IncompleteRead:
469 self._close_conn()
470 raise
471 self.length = 0
472 self._close_conn() # we read everything
473 return s
474
475 def readinto(self, b):
476 """Read up to len(b) bytes into bytearray b and return the number
477 of bytes read.
478 """
479
480 if self.fp is None:
481 return 0
482
483 if self._method == "HEAD":
484 self._close_conn()
485 return 0
486
487 if self.chunked:
488 return self._readinto_chunked(b)
489
490 if self.length is not None:
491 if len(b) > self.length:
492 # clip the read to the "end of response"
493 b = memoryview(b)[0:self.length]
494
495 # we do not use _safe_read() here because this may be a .will_close
496 # connection, and the user is reading more bytes than will be provided
497 # (for example, reading in 1k chunks)
498 n = self.fp.readinto(b)
499 if not n and b:
500 # Ideally, we would raise IncompleteRead if the content-length
501 # wasn't satisfied, but it might break compatibility.
502 self._close_conn()
503 elif self.length is not None:
504 self.length -= n
505 if not self.length:
506 self._close_conn()
507 return n
508
509 def _read_next_chunk_size(self):
510 # Read the next chunk size from the file
511 line = self.fp.readline(_MAXLINE + 1)
512 if len(line) > _MAXLINE:
513 raise LineTooLong("chunk size")
514 i = line.find(b";")
515 if i >= 0:
516 line = line[:i] # strip chunk-extensions
517 try:
518 return int(line, 16)
519 except ValueError:
520 # close the connection as protocol synchronisation is
521 # probably lost
522 self._close_conn()
523 raise
524
525 def _read_and_discard_trailer(self):
526 # read and discard trailer up to the CRLF terminator
527 ### note: we shouldn't have any trailers!
528 while True:
529 line = self.fp.readline(_MAXLINE + 1)
530 if len(line) > _MAXLINE:
531 raise LineTooLong("trailer line")
532 if not line:
533 # a vanishingly small number of sites EOF without
534 # sending the trailer
535 break
536 if line in (b'\r\n', b'\n', b''):
537 break
538
539 def _get_chunk_left(self):
540 # return self.chunk_left, reading a new chunk if necessary.
541 # chunk_left == 0: at the end of the current chunk, need to close it
542 # chunk_left == None: No current chunk, should read next.
543 # This function returns a non-zero value, or None if the last
544 # chunk has been read.
545 chunk_left = self.chunk_left
546 if not chunk_left: # Can be 0 or None
547 if chunk_left is not None:
548 # We are at the end of chunk, discard chunk end
549 self._safe_read(2) # toss the CRLF at the end of the chunk
550 try:
551 chunk_left = self._read_next_chunk_size()
552 except ValueError:
553 raise IncompleteRead(b'')
554 if chunk_left == 0:
555 # last chunk: 1*("0") [ chunk-extension ] CRLF
556 self._read_and_discard_trailer()
557 # we read everything; close the "file"
558 self._close_conn()
559 chunk_left = None
560 self.chunk_left = chunk_left
561 return chunk_left
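# Illustrative chunked body on the wire (example data, not from the original
# source):
#
#     4\r\nWiki\r\n5\r\npedia\r\n0\r\n\r\n
#
# Successive calls to _get_chunk_left() (as made by _readall_chunked()) return
# 4, then 5, and finally None once the terminating "0" chunk and its empty
# trailer section have been consumed; the reassembled body is b"Wikipedia".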
562
563 def _readall_chunked(self):
564 assert self.chunked != _UNKNOWN
565 value = []
566 try:
567 while True:
568 chunk_left = self._get_chunk_left()
569 if chunk_left is None:
570 break
571 value.append(self._safe_read(chunk_left))
572 self.chunk_left = 0
573 return b''.join(value)
574 except IncompleteRead:
575 raise IncompleteRead(b''.join(value))
576
577 def _readinto_chunked(self, b):
578 assert self.chunked != _UNKNOWN
579 total_bytes = 0
580 mvb = memoryview(b)
581 try:
582 while True:
583 chunk_left = self._get_chunk_left()
584 if chunk_left is None:
585 return total_bytes
586
587 if len(mvb) <= chunk_left:
588 n = self._safe_readinto(mvb)
589 self.chunk_left = chunk_left - n
590 return total_bytes + n
591
592 temp_mvb = mvb[:chunk_left]
593 n = self._safe_readinto(temp_mvb)
594 mvb = mvb[n:]
595 total_bytes += n
596 self.chunk_left = 0
597
598 except IncompleteRead:
599 raise IncompleteRead(bytes(b[0:total_bytes]))
600
601 def _safe_read(self, amt):
602 """Read the number of bytes requested.
603
604 This function should be used when <amt> bytes "should" be present for
605 reading. If the bytes are truly not available (due to EOF), then the
606 IncompleteRead exception can be used to detect the problem.
607 """
608 data = self.fp.read(amt)
609 if len(data) < amt:
610 raise IncompleteRead(data, amt-len(data))
611 return data
612
613 def _safe_readinto(self, b):
614 """Same as _safe_read, but for reading into a buffer."""
615 amt = len(b)
616 n = self.fp.readinto(b)
617 if n < amt:
618 raise IncompleteRead(bytes(b[:n]), amt-n)
619 return n
620
621 def read1(self, n=-1):
622 """Read with at most one underlying system call. If at least one
623 byte is buffered, return that instead.
624 """
625 if self.fp is None or self._method == "HEAD":
626 return b""
627 if self.chunked:
628 return self._read1_chunked(n)
629 if self.length is not None and (n < 0 or n > self.length):
630 n = self.length
631 result = self.fp.read1(n)
632 if not result and n:
633 self._close_conn()
634 elif self.length is not None:
635 self.length -= len(result)
636 return result
637
638 def peek(self, n=-1):
639 # Having this enables IOBase.readline() to read more than one
640 # byte at a time
641 if self.fp is None or self._method == "HEAD":
642 return b""
643 if self.chunked:
644 return self._peek_chunked(n)
645 return self.fp.peek(n)
646
647 def readline(self, limit=-1):
648 if self.fp is None or self._method == "HEAD":
649 return b""
650 if self.chunked:
651 # Fallback to IOBase readline which uses peek() and read()
652 return super().readline(limit)
653 if self.length is not None and (limit < 0 or limit > self.length):
654 limit = self.length
655 result = self.fp.readline(limit)
656 if not result and limit:
657 self._close_conn()
658 elif self.length is not None:
659 self.length -= len(result)
660 return result
661
662 def _read1_chunked(self, n):
663 # Strictly speaking, _get_chunk_left() may cause more than one read,
664 # but that is ok, since that is to satisfy the chunked protocol.
665 chunk_left = self._get_chunk_left()
666 if chunk_left is None or n == 0:
667 return b''
668 if not (0 <= n <= chunk_left):
669 n = chunk_left # if n is negative or larger than chunk_left
670 read = self.fp.read1(n)
671 self.chunk_left -= len(read)
672 if not read:
673 raise IncompleteRead(b"")
674 return read
675
676 def _peek_chunked(self, n):
677 # Strictly speaking, _get_chunk_left() may cause more than one read,
678 # but that is ok, since that is to satisfy the chunked protocol.
679 try:
680 chunk_left = self._get_chunk_left()
681 except IncompleteRead:
682 return b'' # peek doesn't worry about protocol
683 if chunk_left is None:
684 return b'' # eof
685 # peek is allowed to return more than requested. Just request the
686 # entire chunk, and truncate what we get.
687 return self.fp.peek(chunk_left)[:chunk_left]
688
689 def fileno(self):
690 return self.fp.fileno()
691
692 def getheader(self, name, default=None):
693 '''Returns the value of the header matching *name*.
694
695 If there are multiple matching headers, the values are
696 combined into a single string separated by commas and spaces.
697
698 If no matching header is found, returns *default* or None if
699 the *default* is not specified.
700
701 If the headers are unknown, raises http.client.ResponseNotReady.
702
703 '''
704 if self.headers is None:
705 raise ResponseNotReady()
706 headers = self.headers.get_all(name) or default
707 if isinstance(headers, str) or not hasattr(headers, '__iter__'):
708 return headers
709 else:
710 return ', '.join(headers)
711
712 def getheaders(self):
713 """Return list of (header, value) tuples."""
714 if self.headers is None:
715 raise ResponseNotReady()
716 return list(self.headers.items())
717
718 # We override IOBase.__iter__ so that it doesn't check for closed-ness
719
720 def __iter__(self):
721 return self
722
723 # For compatibility with old-style urllib responses.
724
725 def info(self):
726 '''Returns an instance of the class email.message.Message containing
727 meta-information associated with the URL.
728
729 When the method is HTTP, these headers are those returned by
730 the server at the head of the retrieved HTML page (including
731 Content-Length and Content-Type).
732
733 When the method is FTP, a Content-Length header will be
734 present if (as is now usual) the server passed back a file
735 length in response to the FTP retrieval request. A
736 Content-Type header will be present if the MIME type can be
737 guessed.
738
739 When the method is local-file, returned headers will include
740 a Date representing the file's last-modified time, a
741 Content-Length giving file size, and a Content-Type
742 containing a guess at the file's type. See also the
743 description of the mimetools module.
744
745 '''
746 return self.headers
747
748 def geturl(self):
749 '''Return the real URL of the page.
750
751 In some cases, the HTTP server redirects a client to another
752 URL. The urlopen() function handles this transparently, but in
753 some cases the caller needs to know which URL the client was
754 redirected to. The geturl() method can be used to get at this
755 redirected URL.
756
757 '''
758 return self.url
759
760 def getcode(self):
761 '''Return the HTTP status code that was sent with the response,
762 or None if the URL is not an HTTP URL.
763
764 '''
765 return self.status
766
767 class HTTPConnection:
768
769 _http_vsn = 11
770 _http_vsn_str = 'HTTP/1.1'
771
772 response_class = HTTPResponse
773 default_port = HTTP_PORT
774 auto_open = 1
775 debuglevel = 0
776
777 @staticmethod
778 def _is_textIO(stream):
779 """Test whether a file-like object is a text or a binary stream.
780 """
781 return isinstance(stream, io.TextIOBase)
782
783 @staticmethod
784 def _get_content_length(body, method):
785 """Get the content-length based on the body.
786
787 If the body is None, we set Content-Length: 0 for methods that expect
788 a body (RFC 7230, Section 3.3.2). We also set the Content-Length for
789 any method if the body is a str or bytes-like object and not a file.
790 """
791 if body is None:
792 # do an explicit check for not None here to distinguish
793 # between unset and set but empty
794 if method.upper() in _METHODS_EXPECTING_BODY:
795 return 0
796 else:
797 return None
798
799 if hasattr(body, 'read'):
800 # file-like object.
801 return None
802
803 try:
804 # does it implement the buffer protocol (bytes, bytearray, array)?
805 mv = memoryview(body)
806 return mv.nbytes
807 except TypeError:
808 pass
809
810 if isinstance(body, str):
811 return len(body)
812
813 return None
814
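# Illustrative outcomes of _get_content_length() (hypothetical calls; the file
# name is a placeholder):
#
#     _get_content_length(None, "POST")              # -> 0 (method expects a body)
#     _get_content_length(None, "GET")               # -> None (no body, none expected)
#     _get_content_length(b"abc", "PUT")             # -> 3 (buffer protocol)
#     _get_content_length("abc", "PUT")              # -> 3 (str length)
#     _get_content_length(open("f.bin", "rb"), "PUT")  # -> None (file-like, size unknown)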
815 def __init__(self, host, port=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
816 source_address=None, blocksize=8192):
817 self.timeout = timeout
818 self.source_address = source_address
819 self.blocksize = blocksize
820 self.sock = None
821 self._buffer = []
822 self.__response = None
823 self.__state = _CS_IDLE
824 self._method = None
825 self._tunnel_host = None
826 self._tunnel_port = None
827 self._tunnel_headers = {}
828
829 (self.host, self.port) = self._get_hostport(host, port)
830
831 # This is stored as an instance variable to allow unit
832 # tests to replace it with a suitable mockup
833 self._create_connection = socket.create_connection
834
835 def set_tunnel(self, host, port=None, headers=None):
836 """Set up host and port for HTTP CONNECT tunnelling.
837
838 In a connection that uses HTTP CONNECT tunneling, the host passed to the
839 constructor is used as a proxy server that relays all communication to
840 the endpoint passed to `set_tunnel`. This is done by sending an HTTP
841 CONNECT request to the proxy server when the connection is established.
842
843 This method must be called before the HTTP connection has been
844 established.
845
846 The headers argument should be a mapping of extra HTTP headers to send
847 with the CONNECT request.
848 """
849
850 if self.sock:
851 raise RuntimeError("Can't set up tunnel for established connection")
852
853 self._tunnel_host, self._tunnel_port = self._get_hostport(host, port)
854 if headers:
855 self._tunnel_headers = headers
856 else:
857 self._tunnel_headers.clear()
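# A minimal CONNECT-tunnelling sketch (hostnames and port are placeholders;
# assumes the proxy at "proxy.example" accepts CONNECT requests):
#
#     conn = HTTPConnection("proxy.example", 3128)
#     conn.set_tunnel("www.example.com", 80)
#     conn.request("GET", "/")
#     resp = conn.getresponse()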
858
859 def _get_hostport(self, host, port):
860 if port is None:
861 i = host.rfind(':')
862 j = host.rfind(']') # ipv6 addresses have [...]
863 if i > j:
864 try:
865 port = int(host[i+1:])
866 except ValueError:
867 if host[i+1:] == "": # http://foo.com:/ == http://foo.com/
868 port = self.default_port
869 else:
870 raise InvalidURL("nonnumeric port: '%s'" % host[i+1:])
871 host = host[:i]
872 else:
873 port = self.default_port
874 if host and host[0] == '[' and host[-1] == ']':
875 host = host[1:-1]
876
877 return (host, port)
878
879 def set_debuglevel(self, level):
880 self.debuglevel = level
881
882 def _tunnel(self):
883 connect_str = "CONNECT %s:%d HTTP/1.0\r\n" % (self._tunnel_host,
884 self._tunnel_port)
885 connect_bytes = connect_str.encode("ascii")
886 self.send(connect_bytes)
887 for header, value in self._tunnel_headers.items():
888 header_str = "%s: %s\r\n" % (header, value)
889 header_bytes = header_str.encode("latin-1")
890 self.send(header_bytes)
891 self.send(b'\r\n')
892
893 response = self.response_class(self.sock, method=self._method)
894 (version, code, message) = response._read_status()
895
896 if code != http.HTTPStatus.OK:
897 self.close()
898 raise OSError("Tunnel connection failed: %d %s" % (code,
899 message.strip()))
900 while True:
901 line = response.fp.readline(_MAXLINE + 1)
902 if len(line) > _MAXLINE:
903 raise LineTooLong("header line")
904 if not line:
905 # for sites which EOF without sending a trailer
906 break
907 if line in (b'\r\n', b'\n', b''):
908 break
909
910 if self.debuglevel > 0:
911 print('header:', line.decode())
912
913 def connect(self):
914 """Connect to the host and port specified in __init__."""
915 self.sock = self._create_connection(
916 (self.host,self.port), self.timeout, self.source_address)
917 self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
918
919 if self._tunnel_host:
920 self._tunnel()
921
922 def close(self):
923 """Close the connection to the HTTP server."""
924 self.__state = _CS_IDLE
925 try:
926 sock = self.sock
927 if sock:
928 self.sock = None
929 sock.close() # close it manually... there may be other refs
930 finally:
931 response = self.__response
932 if response:
933 self.__response = None
934 response.close()
935
936 def send(self, data):
937 """Send `data' to the server.
938 ``data`` can be a string object, a bytes object, an array object, a
939 file-like object that supports a .read() method, or an iterable object.
940 """
941
942 if self.sock is None:
943 if self.auto_open:
944 self.connect()
945 else:
946 raise NotConnected()
947
948 if self.debuglevel > 0:
949 print("send:", repr(data))
950 if hasattr(data, "read"):
951 if self.debuglevel > 0:
952 print("sending a read()able")
953 encode = self._is_textIO(data)
954 if encode and self.debuglevel > 0:
955 print("encoding file using iso-8859-1")
956 while 1:
957 datablock = data.read(self.blocksize)
958 if not datablock:
959 break
960 if encode:
961 datablock = datablock.encode("iso-8859-1")
962 self.sock.sendall(datablock)
963 return
964 try:
965 self.sock.sendall(data)
966 except TypeError:
967 if isinstance(data, collections.abc.Iterable):
968 for d in data:
969 self.sock.sendall(d)
970 else:
971 raise TypeError("data should be a bytes-like object "
972 "or an iterable, got %r" % type(data))
973
974 def _output(self, s):
975 """Add a line of output to the current request buffer.
976
977 Assumes that the line does *not* end with \\r\\n.
978 """
979 self._buffer.append(s)
980
981 def _read_readable(self, readable):
982 if self.debuglevel > 0:
983 print("sendIng a read()able")
984 encode = self._is_textIO(readable)
985 if encode and self.debuglevel > 0:
986 print("encoding file using iso-8859-1")
987 while True:
988 datablock = readable.read(self.blocksize)
989 if not datablock:
990 break
991 if encode:
992 datablock = datablock.encode("iso-8859-1")
993 yield datablock
994
995 def _send_output(self, message_body=None, encode_chunked=False):
996 """Send the currently buffered request and clear the buffer.
997
998 Appends an extra \\r\\n to the buffer.
999 A message_body may be specified, to be appended to the request.
1000 """
1001 self._buffer.extend((b"", b""))
1002 msg = b"\r\n".join(self._buffer)
1003 del self._buffer[:]
1004 self.send(msg)
1005
1006 if message_body is not None:
1007
1008 # create a consistent interface to message_body
1009 if hasattr(message_body, 'read'):
1010 # Let file-like take precedence over byte-like. This
1011 # is needed to allow the current position of mmap'ed
1012 # files to be taken into account.
1013 chunks = self._read_readable(message_body)
1014 else:
1015 try:
1016 # this is solely to check to see if message_body
1017 # implements the buffer API. it /would/ be easier
1018 # to capture if PyObject_CheckBuffer was exposed
1019 # to Python.
1020 memoryview(message_body)
1021 except TypeError:
1022 try:
1023 chunks = iter(message_body)
1024 except TypeError:
1025 raise TypeError("message_body should be a bytes-like "
1026 "object or an iterable, got %r"
1027 % type(message_body))
1028 else:
1029 # the object implements the buffer interface and
1030 # can be passed directly into socket methods
1031 chunks = (message_body,)
1032
1033 for chunk in chunks:
1034 if not chunk:
1035 if self.debuglevel > 0:
1036 print('Zero length chunk ignored')
1037 continue
1038
1039 if encode_chunked and self._http_vsn == 11:
1040 # chunked encoding
1041 chunk = f'{len(chunk):X}\r\n'.encode('ascii') + chunk \
1042 + b'\r\n'
1043 self.send(chunk)
1044
1045 if encode_chunked and self._http_vsn == 11:
1046 # end chunked transfer
1047 self.send(b'0\r\n\r\n')
1048
1049 def putrequest(self, method, url, skip_host=False,
1050 skip_accept_encoding=False):
1051 """Send a request to the server.
1052
1053 `method' specifies an HTTP request method, e.g. 'GET'.
1054 `url' specifies the object being requested, e.g. '/index.html'.
1055 `skip_host' if True, does not automatically add a 'Host:' header
1056 `skip_accept_encoding' if True, does not automatically add an
1057 'Accept-Encoding:' header
1058 """
1059
1060 # if a prior response has been completed, then forget about it.
1061 if self.__response and self.__response.isclosed():
1062 self.__response = None
1063
1064
1065 # in certain cases, we cannot issue another request on this connection.
1066 # this occurs when:
1067 # 1) we are in the process of sending a request. (_CS_REQ_STARTED)
1068 # 2) a response to a previous request has signalled that it is going
1069 # to close the connection upon completion.
1070 # 3) the headers for the previous response have not been read, thus
1071 # we cannot determine whether point (2) is true. (_CS_REQ_SENT)
1072 #
1073 # if there is no prior response, then we can request at will.
1074 #
1075 # if point (2) is true, then we will have passed the socket to the
1076 # response (effectively meaning, "there is no prior response"), and
1077 # will open a new one when a new request is made.
1078 #
1079 # Note: if a prior response exists, then we *can* start a new request.
1080 # We are not allowed to begin fetching the response to this new
1081 # request, however, until that prior response is complete.
1082 #
1083 if self.__state == _CS_IDLE:
1084 self.__state = _CS_REQ_STARTED
1085 else:
1086 raise CannotSendRequest(self.__state)
1087
1088 # Save the method for use later in the response phase
1089 self._method = method
1090
1091 url = url or '/'
1092 self._validate_path(url)
1093
1094 request = '%s %s %s' % (method, url, self._http_vsn_str)
1095
1096 self._output(self._encode_request(request))
1097
1098 if self._http_vsn == 11:
1099 # Issue some standard headers for better HTTP/1.1 compliance
1100
1101 if not skip_host:
1102 # this header is issued *only* for HTTP/1.1
1103 # connections. more specifically, this means it is
1104 # only issued when the client uses the new
1105 # HTTPConnection() class. backwards-compat clients
1106 # will be using HTTP/1.0 and those clients may be
1107 # issuing this header themselves. we should NOT issue
1108 # it twice; some web servers (such as Apache) barf
1109 # when they see two Host: headers
1110
1111 # If we need a non-standard port, include it in the
1112 # header. If the request is going through a proxy,
1113 # use the host of the actual URL, not the host of the
1114 # proxy.
1115
1116 netloc = ''
1117 if url.startswith('http'):
1118 nil, netloc, nil, nil, nil = urlsplit(url)
1119
1120 if netloc:
1121 try:
1122 netloc_enc = netloc.encode("ascii")
1123 except UnicodeEncodeError:
1124 netloc_enc = netloc.encode("idna")
1125 self.putheader('Host', netloc_enc)
1126 else:
1127 if self._tunnel_host:
1128 host = self._tunnel_host
1129 port = self._tunnel_port
1130 else:
1131 host = self.host
1132 port = self.port
1133
1134 try:
1135 host_enc = host.encode("ascii")
1136 except UnicodeEncodeError:
1137 host_enc = host.encode("idna")
1138
1139 # As per RFC 2732, an IPv6 address should be wrapped in []
1140 # when used as Host header
1141
1142 if host.find(':') >= 0:
1143 host_enc = b'[' + host_enc + b']'
1144
1145 if port == self.default_port:
1146 self.putheader('Host', host_enc)
1147 else:
1148 host_enc = host_enc.decode("ascii")
1149 self.putheader('Host', "%s:%s" % (host_enc, port))
1150
1151 # note: we are assuming that clients will not attempt to set these
1152 # headers since *this* library must deal with the
1153 # consequences. this also means that when the supporting
1154 # libraries are updated to recognize other forms, then this
1155 # code should be changed (removed or updated).
1156
1157 # we only want a Content-Encoding of "identity" since we don't
1158 # support encodings such as x-gzip or x-deflate.
1159 if not skip_accept_encoding:
1160 self.putheader('Accept-Encoding', 'identity')
1161
1162 # we can accept "chunked" Transfer-Encodings, but no others
1163 # NOTE: no TE header implies *only* "chunked"
1164 #self.putheader('TE', 'chunked')
1165
1166 # if TE is supplied in the header, then it must appear in a
1167 # Connection header.
1168 #self.putheader('Connection', 'TE')
1169
1170 else:
1171 # For HTTP/1.0, the server will assume "not chunked"
1172 pass
1173
1174 def _encode_request(self, request):
1175 # ASCII also helps prevent CVE-2019-9740.
1176 return request.encode('ascii')
1177
1178 def _validate_path(self, url):
1179 """Validate a url for putrequest."""
1180 # Prevent CVE-2019-9740.
1181 match = _contains_disallowed_url_pchar_re.search(url)
1182 if match:
1183 raise InvalidURL(f"URL can't contain control characters. {url!r} "
1184 f"(found at least {match.group()!r})")
1185
1186 def putheader(self, header, *values):
1187 """Send a request header line to the server.
1188
1189 For example: h.putheader('Accept', 'text/html')
1190 """
1191 if self.__state != _CS_REQ_STARTED:
1192 raise CannotSendHeader()
1193
1194 if hasattr(header, 'encode'):
1195 header = header.encode('ascii')
1196
1197 if not _is_legal_header_name(header):
1198 raise ValueError('Invalid header name %r' % (header,))
1199
1200 values = list(values)
1201 for i, one_value in enumerate(values):
1202 if hasattr(one_value, 'encode'):
1203 values[i] = one_value.encode('latin-1')
1204 elif isinstance(one_value, int):
1205 values[i] = str(one_value).encode('ascii')
1206
1207 if _is_illegal_header_value(values[i]):
1208 raise ValueError('Invalid header value %r' % (values[i],))
1209
1210 value = b'\r\n\t'.join(values)
1211 header = header + b': ' + value
1212 self._output(header)
1213
1214 def endheaders(self, message_body=None, *, encode_chunked=False):
1215 """Indicate that the last header line has been sent to the server.
1216
1217 This method sends the request to the server. The optional message_body
1218 argument can be used to pass a message body associated with the
1219 request.
1220 """
1221 if self.__state == _CS_REQ_STARTED:
1222 self.__state = _CS_REQ_SENT
1223 else:
1224 raise CannotSendHeader()
1225 self._send_output(message_body, encode_chunked=encode_chunked)
1226
1227 def request(self, method, url, body=None, headers={}, *,
1228 encode_chunked=False):
1229 """Send a complete request to the server."""
1230 self._send_request(method, url, body, headers, encode_chunked)
1231
1232 def _send_request(self, method, url, body, headers, encode_chunked):
1233 # Honor explicitly requested Host: and Accept-Encoding: headers.
1234 header_names = frozenset(k.lower() for k in headers)
1235 skips = {}
1236 if 'host' in header_names:
1237 skips['skip_host'] = 1
1238 if 'accept-encoding' in header_names:
1239 skips['skip_accept_encoding'] = 1
1240
1241 self.putrequest(method, url, **skips)
1242
1243 # chunked encoding will happen if HTTP/1.1 is used and either
1244 # the caller passes encode_chunked=True or the following
1245 # conditions hold:
1246 # 1. content-length has not been explicitly set
1247 # 2. the body is a file or iterable, but not a str or bytes-like
1248 # 3. Transfer-Encoding has NOT been explicitly set by the caller
1249
1250 if 'content-length' not in header_names:
1251 # only chunk body if not explicitly set for backwards
1252 # compatibility, assuming the client code is already handling the
1253 # chunking
1254 if 'transfer-encoding' not in header_names:
1255 # if content-length cannot be automatically determined, fall
1256 # back to chunked encoding
1257 encode_chunked = False
1258 content_length = self._get_content_length(body, method)
1259 if content_length is None:
1260 if body is not None:
1261 if self.debuglevel > 0:
1262 print('Unable to determine size of %r' % body)
1263 encode_chunked = True
1264 self.putheader('Transfer-Encoding', 'chunked')
1265 else:
1266 self.putheader('Content-Length', str(content_length))
1267 else:
1268 encode_chunked = False
1269
1270 for hdr, value in headers.items():
1271 self.putheader(hdr, value)
1272 if isinstance(body, str):
1273 # RFC 2616 Section 3.7.1 says that text media types have a
1274 # default charset of iso-8859-1.
1275 body = _encode(body, 'body')
1276 self.endheaders(body, encode_chunked=encode_chunked)
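# Illustrative behaviour of the body-framing logic above (URL and file name
# are placeholders):
#
#     conn.request("POST", "/upload", body=b"data")
#         -> sends "Content-Length: 4"
#     conn.request("POST", "/upload", body=open("payload.bin", "rb"))
#         -> the length cannot be determined, so an HTTP/1.1 request falls
#            back to "Transfer-Encoding: chunked" and chunk-encodes the file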
1277
1278 def getresponse(self):
1279 """Get the response from the server.
1280
1281 If the HTTPConnection is in the correct state, returns an
1282 instance of HTTPResponse or of whatever object is returned by
1283 the response_class variable.
1284
1285 If a request has not been sent or if a previous response has
1286 not been handled, ResponseNotReady is raised. If the HTTP
1287 response indicates that the connection should be closed, then
1288 it will be closed before the response is returned. When the
1289 connection is closed, the underlying socket is closed.
1290 """
1291
1292 # if a prior response has been completed, then forget about it.
1293 if self.__response and self.__response.isclosed():
1294 self.__response = None
1295
1296 # if a prior response exists, then it must be completed (otherwise, we
1297 # cannot read this response's header to determine the connection-close
1298 # behavior)
1299 #
1300 # note: if a prior response existed, but was connection-close, then the
1301 # socket and response were made independent of this HTTPConnection
1302 # object since a new request requires that we open a whole new
1303 # connection
1304 #
1305 # this means the prior response had one of two states:
1306 # 1) will_close: this connection was reset and the prior socket and
1307 # response operate independently
1308 # 2) persistent: the response was retained and we await its
1309 # isclosed() status to become true.
1310 #
1311 if self.__state != _CS_REQ_SENT or self.__response:
1312 raise ResponseNotReady(self.__state)
1313
1314 if self.debuglevel > 0:
1315 response = self.response_class(self.sock, self.debuglevel,
1316 method=self._method)
1317 else:
1318 response = self.response_class(self.sock, method=self._method)
1319
1320 try:
1321 try:
1322 response.begin()
1323 except ConnectionError:
1324 self.close()
1325 raise
1326 assert response.will_close != _UNKNOWN
1327 self.__state = _CS_IDLE
1328
1329 if response.will_close:
1330 # this effectively passes the connection to the response
1331 self.close()
1332 else:
1333 # remember this, so we can tell when it is complete
1334 self.__response = response
1335
1336 return response
1337 except:
1338 response.close()
1339 raise
1340
1341 try:
1342 import ssl
1343 except ImportError:
1344 pass
1345 else:
1346 class HTTPSConnection(HTTPConnection):
1347 "This class allows communication via SSL."
1348
1349 default_port = HTTPS_PORT
1350
1351 # XXX Should key_file and cert_file be deprecated in favour of context?
1352
1353 def __init__(self, host, port=None, key_file=None, cert_file=None,
1354 timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
1355 source_address=None, *, context=None,
1356 check_hostname=None, blocksize=8192):
1357 super(HTTPSConnection, self).__init__(host, port, timeout,
1358 source_address,
1359 blocksize=blocksize)
1360 if (key_file is not None or cert_file is not None or
1361 check_hostname is not None):
1362 import warnings
1363 warnings.warn("key_file, cert_file and check_hostname are "
1364 "deprecated, use a custom context instead.",
1365 DeprecationWarning, 2)
1366 self.key_file = key_file
1367 self.cert_file = cert_file
1368 if context is None:
1369 context = ssl._create_default_https_context()
1370 # enable PHA for TLS 1.3 connections if available
1371 if context.post_handshake_auth is not None:
1372 context.post_handshake_auth = True
1373 will_verify = context.verify_mode != ssl.CERT_NONE
1374 if check_hostname is None:
1375 check_hostname = context.check_hostname
1376 if check_hostname and not will_verify:
1377 raise ValueError("check_hostname needs a SSL context with "
1378 "either CERT_OPTIONAL or CERT_REQUIRED")
1379 if key_file or cert_file:
1380 context.load_cert_chain(cert_file, key_file)
1381 # cert and key file means the user wants to authenticate.
1382 # enable TLS 1.3 PHA implicitly even for custom contexts.
1383 if context.post_handshake_auth is not None:
1384 context.post_handshake_auth = True
1385 self._context = context
1386 if check_hostname is not None:
1387 self._context.check_hostname = check_hostname
1388
1389 def connect(self):
1390 "Connect to a host on a given (SSL) port."
1391
1392 super().connect()
1393
1394 if self._tunnel_host:
1395 server_hostname = self._tunnel_host
1396 else:
1397 server_hostname = self.host
1398
1399 self.sock = self._context.wrap_socket(self.sock,
1400 server_hostname=server_hostname)
1401
1402 __all__.append("HTTPSConnection")
1403
1404 class HTTPException(Exception):
1405 # Subclasses that define an __init__ must call Exception.__init__
1406 # or define self.args. Otherwise, str() will fail.
1407 pass
1408
1409 class NotConnected(HTTPException):
1410 pass
1411
1412 class InvalidURL(HTTPException):
1413 pass
1414
1415 class UnknownProtocol(HTTPException):
1416 def __init__(self, version):
1417 self.args = version,
1418 self.version = version
1419
1420 class UnknownTransferEncoding(HTTPException):
1421 pass
1422
1423 class UnimplementedFileMode(HTTPException):
1424 pass
1425
1426 class IncompleteRead(HTTPException):
1427 def __init__(self, partial, expected=None):
1428 self.args = partial,
1429 self.partial = partial
1430 self.expected = expected
1431 def __repr__(self):
1432 if self.expected is not None:
1433 e = ', %i more expected' % self.expected
1434 else:
1435 e = ''
1436 return '%s(%i bytes read%s)' % (self.__class__.__name__,
1437 len(self.partial), e)
1438 __str__ = object.__str__
1439
1440 class ImproperConnectionState(HTTPException):
1441 pass
1442
1443 class CannotSendRequest(ImproperConnectionState):
1444 pass
1445
1446 class CannotSendHeader(ImproperConnectionState):
1447 pass
1448
1449 class ResponseNotReady(ImproperConnectionState):
1450 pass
1451
1452 class BadStatusLine(HTTPException):
1453 def __init__(self, line):
1454 if not line:
1455 line = repr(line)
1456 self.args = line,
1457 self.line = line
1458
1459 class LineTooLong(HTTPException):
1460 def __init__(self, line_type):
1461 HTTPException.__init__(self, "got more than %d bytes when reading %s"
1462 % (_MAXLINE, line_type))
1463
1464 class RemoteDisconnected(ConnectionResetError, BadStatusLine):
1465 def __init__(self, *pos, **kw):
1466 BadStatusLine.__init__(self, "")
1467 ConnectionResetError.__init__(self, *pos, **kw)
1468
1469 # for backwards compatibility
1470 error = HTTPException