Changeset View
Changeset View
Standalone View
Standalone View
test/functional/test_framework/util.py
Show First 20 Lines • Show All 128 Lines • ▼ Show 20 Lines | |||||
def try_rpc(code, message, fun, *args, **kwds): | def try_rpc(code, message, fun, *args, **kwds): | ||||
"""Tries to run an rpc command. | """Tries to run an rpc command. | ||||
Test against error code and message if the rpc fails. | Test against error code and message if the rpc fails. | ||||
Returns whether a JSONRPCException was raised.""" | Returns whether a JSONRPCException was raised.""" | ||||
try: | try: | ||||
fun(*args, **kwds) | fun(*args, **kwds) | ||||
except JSONRPCException as e: | except JSONRPCException as e: | ||||
# JSONRPCException was thrown as expected. Check the code and message values are correct. | # JSONRPCException was thrown as expected. Check the code and message | ||||
# values are correct. | |||||
if (code is not None) and (code != e.error["code"]): | if (code is not None) and (code != e.error["code"]): | ||||
raise AssertionError( | raise AssertionError( | ||||
"Unexpected JSONRPC error code {}".format(e.error["code"])) | "Unexpected JSONRPC error code {}".format(e.error["code"])) | ||||
if (message is not None) and (message not in e.error['message']): | if (message is not None) and (message not in e.error['message']): | ||||
raise AssertionError( | raise AssertionError( | ||||
"Expected substring not found:" + e.error['message']) | "Expected substring not found:" + e.error['message']) | ||||
return True | return True | ||||
except Exception as e: | except Exception as e: | ||||
Show All 18 Lines | def assert_is_hash_string(string, length=64): | ||||
elif length and len(string) != length: | elif length and len(string) != length: | ||||
raise AssertionError( | raise AssertionError( | ||||
"String of length {} expected; got {}".format(length, len(string))) | "String of length {} expected; got {}".format(length, len(string))) | ||||
elif not re.match('[abcdef0-9]+$', string): | elif not re.match('[abcdef0-9]+$', string): | ||||
raise AssertionError( | raise AssertionError( | ||||
"String {!r} contains invalid characters for a hash.".format(string)) | "String {!r} contains invalid characters for a hash.".format(string)) | ||||
def assert_array_result(object_array, to_match, expected, should_not_find=False): | def assert_array_result(object_array, to_match, expected, | ||||
should_not_find=False): | |||||
""" | """ | ||||
Pass in array of JSON objects, a dictionary with key/value pairs | Pass in array of JSON objects, a dictionary with key/value pairs | ||||
to match against, and another dictionary with expected key/value | to match against, and another dictionary with expected key/value | ||||
pairs. | pairs. | ||||
If the should_not_find flag is true, to_match should not be found | If the should_not_find flag is true, to_match should not be found | ||||
in object_array | in object_array | ||||
""" | """ | ||||
if should_not_find: | if should_not_find: | ||||
▲ Show 20 Lines • Show All 49 Lines • ▼ Show 20 Lines | |||||
def str_to_b64str(string):
    """Encode a text string as base64 and return it as an ASCII string."""
    utf8_bytes = string.encode('utf-8')
    return b64encode(utf8_bytes).decode('ascii')
def satoshi_round(amount):
    """Truncate amount downward to 8 decimal places (1 satoshi precision)."""
    one_satoshi = Decimal('0.00000001')
    return Decimal(amount).quantize(one_satoshi, rounding=ROUND_DOWN)
def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf'), lock=None): | def wait_until(predicate, *, attempts=float('inf'), | ||||
timeout=float('inf'), lock=None): | |||||
if attempts == float('inf') and timeout == float('inf'): | if attempts == float('inf') and timeout == float('inf'): | ||||
timeout = 60 | timeout = 60 | ||||
attempt = 0 | attempt = 0 | ||||
time_end = time.time() + timeout | time_end = time.time() + timeout | ||||
while attempt < attempts and time.time() < time_end: | while attempt < attempts and time.time() < time_end: | ||||
if lock: | if lock: | ||||
with lock: | with lock: | ||||
▲ Show 20 Lines • Show All 56 Lines • ▼ Show 20 Lines | def get_rpc_proxy(url, node_number, timeout=None, coveragedir=None): | ||||
coverage_logfile = coverage.get_filename( | coverage_logfile = coverage.get_filename( | ||||
coveragedir, node_number) if coveragedir else None | coveragedir, node_number) if coveragedir else None | ||||
return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile) | return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile) | ||||
def p2p_port(n):
    """Return the deterministic P2P listen port for node index n.

    Ports are spread by PortSeed.n so that parallel test runs do not
    collide; n must not exceed MAX_NODES.
    """
    assert n <= MAX_NODES
    seed_offset = (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
    return PORT_MIN + n + seed_offset
def rpc_port(n):
    """Return the deterministic RPC listen port for node index n.

    Lives one PORT_RANGE above the P2P ports and is spread by PortSeed.n
    so parallel test runs do not collide.
    """
    seed_offset = (MAX_NODES * PortSeed.n) % (PORT_RANGE - 1 - MAX_NODES)
    return PORT_MIN + PORT_RANGE + n + seed_offset
def rpc_url(datadir, host, port):
    """Build the authenticated RPC endpoint URL for a node.

    Credentials come from the node's auth cookie in datadir; host falls
    back to localhost when not given.
    """
    user, password = get_auth_cookie(datadir)
    if host is None:
        host = '127.0.0.1'
    return "http://{}:{}@{}:{}".format(user, password, host, int(port))
▲ Show 20 Lines • Show All 63 Lines • ▼ Show 20 Lines | |||||
def set_node_times(nodes, t):
    """Set mock time t on every node in nodes via the setmocktime RPC."""
    for n in nodes:
        n.setmocktime(t)
def disconnect_nodes(from_node, to_node):
    """Drop every connection from from_node to to_node and wait for it to go."""
    matching_ids = [peer['id'] for peer in from_node.getpeerinfo()
                    if to_node.name in peer['subver']]
    for node_id in matching_ids:
        try:
            from_node.disconnectnode(nodeid=node_id)
        except JSONRPCException as e:
            # The peer may drop on its own between listing and disconnecting;
            # tolerate that race when mass-disconnecting peers.
            if e.error['code'] != -29:  # RPC_CLIENT_NODE_NOT_CONNECTED
                raise
    # Block until no peer advertising to_node's subver remains connected.
    wait_until(lambda: [peer['id'] for peer in from_node.getpeerinfo()
                        if to_node.name in peer['subver']] == [], timeout=5)
def connect_nodes(from_node, to_node):
    """Connect from_node to to_node and wait for the version handshake."""
    host = '127.0.0.1' if to_node.host is None else to_node.host
    from_node.addnode(host + ':' + str(to_node.p2p_port), "onetry")
    # Poll until every peer has completed the version handshake; relaying
    # transactions before that point would race the handshake.
    wait_until(
        lambda: all(peer['version'] != 0 for peer in from_node.getpeerinfo()))
def connect_nodes_bi(a, b):
    """Connect nodes a and b to each other in both directions."""
    for src, dst in ((a, b), (b, a)):
        connect_nodes(src, dst)
def sync_blocks(rpc_connections, *, wait=1, timeout=60): | def sync_blocks(rpc_connections, *, wait=1, timeout=60): | ||||
Show All 9 Lines | while time.time() <= stop_time: | ||||
best_hash = [x.getbestblockhash() for x in rpc_connections] | best_hash = [x.getbestblockhash() for x in rpc_connections] | ||||
if best_hash.count(best_hash[0]) == len(rpc_connections): | if best_hash.count(best_hash[0]) == len(rpc_connections): | ||||
return | return | ||||
time.sleep(wait) | time.sleep(wait) | ||||
raise AssertionError("Block sync timed out:{}".format( | raise AssertionError("Block sync timed out:{}".format( | ||||
"".join("\n {!r}".format(b) for b in best_hash))) | "".join("\n {!r}".format(b) for b in best_hash))) | ||||
def sync_mempools(rpc_connections, *, wait=1, timeout=60, flush_scheduler=True): | def sync_mempools(rpc_connections, *, wait=1, | ||||
timeout=60, flush_scheduler=True): | |||||
""" | """ | ||||
Wait until everybody has the same transactions in their memory | Wait until everybody has the same transactions in their memory | ||||
pools | pools | ||||
""" | """ | ||||
stop_time = time.time() + timeout | stop_time = time.time() + timeout | ||||
while time.time() <= stop_time: | while time.time() <= stop_time: | ||||
pool = [set(r.getrawmempool()) for r in rpc_connections] | pool = [set(r.getrawmempool()) for r in rpc_connections] | ||||
if pool.count(pool[0]) == len(rpc_connections): | if pool.count(pool[0]) == len(rpc_connections): | ||||
▲ Show 20 Lines • Show All 141 Lines • Show Last 20 Lines |