diff --git a/test/functional/interface_http.py b/test/functional/interface_http.py index dbdceb52d15..a10132e2642 100755 --- a/test/functional/interface_http.py +++ b/test/functional/interface_http.py @@ -8,6 +8,7 @@ from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, str_to_b64str import http.client +import time import urllib.parse class HTTPBasicsTest (BitcoinTestFramework): @@ -105,5 +106,136 @@ class HTTPBasicsTest (BitcoinTestFramework): assert_equal(out1.status, http.client.BAD_REQUEST) + self.log.info("Check pipelining") + # Requests are responded to in order they were received + # See https://www.rfc-editor.org/rfc/rfc7230#section-6.3.2 + tip_height = self.nodes[2].getblockcount() + + req = "POST / HTTP/1.1\r\n" + req += f'Authorization: Basic {str_to_b64str(authpair)}\r\n' + + # First request will take a long time to process + body1 = f'{{"method": "waitforblockheight", "params": [{tip_height + 1}]}}' + req1 = req + req1 += f'Content-Length: {len(body1)}\r\n\r\n' + req1 += body1 + + # Second request will process very fast + body2 = '{"method": "getblockcount"}' + req2 = req + req2 += f'Content-Length: {len(body2)}\r\n\r\n' + req2 += body2 + # Get the underlying socket from HTTP connection so we can send something unusual + conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port) + conn.connect() + sock = conn.sock + sock.settimeout(5) + # Send two requests in a row. 
The first will block the second indefinitely + sock.sendall(req1.encode("utf-8")) + sock.sendall(req2.encode("utf-8")) + try: + # The server should not respond to the fast, second request + # until the (very) slow first request has been handled: + res = sock.recv(1024) + assert False + except TimeoutError: + pass + + # Use a separate http connection to generate a block + self.generate(self.nodes[2], 1, sync_fun=self.no_op) + + # Wait for two responses to be received + res = b"" + while res.count(b"result") != 2: + res += sock.recv(1024) + + # waitforblockheight was responded to first, and then getblockcount + # which includes the block added after the request was made + chunks = res.split(b'"result":') + assert chunks[1].startswith(b'{"hash":') + assert chunks[2].startswith(bytes(f'{tip_height + 1}', 'utf8')) + + + self.log.info("Check HTTP request encoded with chunked transfer") + headers_chunked = headers.copy() + headers_chunked.update({"Transfer-encoding": "chunked"}) + body_chunked = [ + b'{"method": "submitblock", "params": ["', + b'0' * 1000000, + b'1' * 1000000, + b'2' * 1000000, + b'3' * 1000000, + b'"]}' + ] + conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port) + conn.connect() + conn.request( + method='POST', + url='/', + body=iter(body_chunked), + headers=headers_chunked, + encode_chunked=True) + out1 = conn.getresponse().read() + assert_equal(out1, b'{"result":"high-hash","error":null}\n') + + + self.log.info("Check -rpcservertimeout") + # The test framework typically reuses a single persistent HTTP connection + # for all RPCs to a TestNode. Because we are setting -rpcservertimeout + # so low on this one node, its connection will quickly timeout and get dropped by + # the server. Negating this setting will force the AuthServiceProxy + # for this node to create a fresh new HTTP connection for every command + # called for the remainder of this test. 
+ self.nodes[2].reuse_http_connections = False + + self.restart_node(2, extra_args=["-rpcservertimeout=2"]) + # This is the amount of time the server will wait for a client to + # send a complete request. Test it by sending an incomplete but + # so-far otherwise well-formed HTTP request, and never finishing it. + + # Copied from http_incomplete_test_() in regress_http.c in libevent. + # A complete request would have an additional "\r\n" at the end. + http_request = "GET /test1 HTTP/1.1\r\nHost: somehost\r\n" + + # Get the underlying socket from HTTP connection so we can send something unusual + conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port) + conn.connect() + sock = conn.sock + sock.sendall(http_request.encode("utf-8")) + # Wait for response, but expect a timeout disconnection after 2 seconds (-rpcservertimeout) + start = time.time() + res = sock.recv(1024) + stop = time.time() + # Server disconnected with EOF + assert_equal(res, b"") + # Server disconnected within an acceptable range of time: + # not immediately, and not too far over the configured duration. + # This allows for some jitter in the test between client and server. + duration = stop - start + assert duration <= 4, f"Server disconnected too slow: {duration} > 4" + assert duration >= 1, f"Server disconnected too fast: {duration} < 1" + + # The connection is definitely closed. 
+ got_expected_error = False + try: + conn.request('GET', '/') + conn.getresponse() + # macos/linux windows + except (ConnectionResetError, ConnectionAbortedError): + got_expected_error = True + assert got_expected_error + + # Sanity check + http_request = "GET /test2 HTTP/1.1\r\nHost: somehost\r\n\r\n" + conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port) + conn.connect() + sock = conn.sock + sock.sendall(http_request.encode("utf-8")) + res = sock.recv(1024) + assert res.startswith(b"HTTP/1.1 404 Not Found") + # still open + conn.request('GET', '/') + conn.getresponse() + if __name__ == '__main__': HTTPBasicsTest(__file__).main() diff --git a/test/functional/test_framework/authproxy.py b/test/functional/test_framework/authproxy.py index e24e5f7312a..9b2fc0f7f9e 100644 --- a/test/functional/test_framework/authproxy.py +++ b/test/functional/test_framework/authproxy.py @@ -75,6 +75,7 @@ class AuthServiceProxy(): self.__service_url = service_url self._service_name = service_name self.ensure_ascii = ensure_ascii # can be toggled on the fly by tests + self.reuse_http_connections = True self.__url = urllib.parse.urlparse(service_url) user = None if self.__url.username is None else self.__url.username.encode('utf8') passwd = None if self.__url.password is None else self.__url.password.encode('utf8') @@ -92,6 +93,8 @@ class AuthServiceProxy(): raise AttributeError if self._service_name is not None: name = "%s.%s" % (self._service_name, name) + if not self.reuse_http_connections: + self._set_conn() return AuthServiceProxy(self.__service_url, name, connection=self.__conn) def _request(self, method, path, postdata): @@ -102,6 +105,8 @@ class AuthServiceProxy(): 'User-Agent': USER_AGENT, 'Authorization': self.__auth_header, 'Content-type': 'application/json'} + if not self.reuse_http_connections: + self._set_conn() self.__conn.request(method, path, postdata, headers) return self._get_response() diff --git a/test/functional/test_framework/test_node.py 
b/test/functional/test_framework/test_node.py index 10d5337ea00..9916fd7f9ea 100755 --- a/test/functional/test_framework/test_node.py +++ b/test/functional/test_framework/test_node.py @@ -154,6 +154,7 @@ class TestNode(): self.process = None self.rpc_connected = False self.rpc = None + self.reuse_http_connections = True # Must be set before calling get_rpc_proxy() i.e. before restarting node self.url = None self.log = logging.getLogger('TestFramework.node%d' % i) # Cache perf subprocesses here by their data output filename. @@ -285,6 +286,7 @@ class TestNode(): timeout=self.rpc_timeout // 2, # Shorter timeout to allow for one retry in case of ETIMEDOUT coveragedir=self.coverage_dir, ) + rpc.auth_service_proxy_instance.reuse_http_connections = self.reuse_http_connections rpc.getblockcount() # If the call to getblockcount() succeeds then the RPC connection is up if self.version_is_at_least(190000) and wait_for_import: