diff --git a/test/functional/abc_p2p_proof_inventory.py b/test/functional/abc_p2p_proof_inventory.py
index 5eba81af7..f80e5a7c3 100644
--- a/test/functional/abc_p2p_proof_inventory.py
+++ b/test/functional/abc_p2p_proof_inventory.py
@@ -1,355 +1,355 @@
#!/usr/bin/env python3
# Copyright (c) 2021 The Bitcoin developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
Test proof inventory relaying
"""

import time

from test_framework.address import ADDRESS_ECREG_UNSPENDABLE
from test_framework.avatools import (
    AvaP2PInterface,
    avalanche_proof_from_hex,
    gen_proof,
    get_proof_ids,
    wait_for_proof,
)
from test_framework.messages import (
    MSG_AVA_PROOF,
    MSG_TYPE_MASK,
    CInv,
    msg_avaproof,
    msg_getdata,
)
from test_framework.p2p import P2PInterface, p2p_lock
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
    assert_equal,
    assert_greater_than,
    assert_raises_rpc_error,
    uint256_hex,
)
from test_framework.wallet_util import bytes_to_wif

# Broadcast reattempt occurs every 10 to 15 minutes
MAX_INITIAL_BROADCAST_DELAY = 15 * 60
# Delay to allow the node to respond to getdata requests
UNCONDITIONAL_RELAY_DELAY = 2 * 60


class ProofInvStoreP2PInterface(P2PInterface):
    def __init__(self):
        super().__init__()
        self.proof_invs_counter = 0
        self.last_proofid = None

    def on_inv(self, message):
        for i in message.inv:
            if i.type & MSG_TYPE_MASK == MSG_AVA_PROOF:
                self.proof_invs_counter += 1
                self.last_proofid = i.hash


class ProofInventoryTest(BitcoinTestFramework):
    def set_test_params(self):
        self.num_nodes = 5
        self.extra_args = [[
            '-avaproofstakeutxodustthreshold=1000000',
            '-avaproofstakeutxoconfirmations=2',
            '-avacooldown=0',
            '-whitelist=noban@127.0.0.1',
        ]] * self.num_nodes

    def generate_proof(self, node, mature=True):
        privkey, proof = gen_proof(self, node)
        if mature:
            self.generate(node, 1, sync_fun=self.no_op)
        return privkey, proof

    def test_send_proof_inv(self):
        self.log.info("Test sending a proof to our peers")

        node = self.nodes[0]

        for _ in range(10):
            node.add_p2p_connection(ProofInvStoreP2PInterface())

        _, proof = self.generate_proof(node)
        assert node.sendavalancheproof(proof.serialize().hex())

        def proof_inv_found(peer):
            with p2p_lock:
                return peer.last_proofid == proof.proofid

        self.wait_until(lambda: all(proof_inv_found(i) for i in node.p2ps))

        self.log.info("Test that we don't send the same inv several times")

        extra_peer = ProofInvStoreP2PInterface()
        node.add_p2p_connection(extra_peer)

        # Send the same proof one more time
        node.sendavalancheproof(proof.serialize().hex())

        # Our new extra peer should receive it but not the others
        self.wait_until(lambda: proof_inv_found(extra_peer))
        assert all(p.proof_invs_counter == 1 for p in node.p2ps)

        # Send the proof again and force the send loop to be processed
        for peer in node.p2ps:
            node.sendavalancheproof(proof.serialize().hex())
            peer.sync_with_ping()

        assert all(p.proof_invs_counter == 1 for p in node.p2ps)

    def test_receive_proof(self):
        self.log.info("Test a peer is created on proof reception")

        node = self.nodes[0]
        _, proof = self.generate_proof(node)

        peer = node.add_p2p_connection(P2PInterface())

        msg = msg_avaproof()
        msg.proof = proof
        peer.send_message(msg)

        self.wait_until(lambda: proof.proofid in get_proof_ids(node))

        self.log.info("Test receiving a proof with an immature utxo")

        _, immature = self.generate_proof(node, mature=False)
        immature_proofid = uint256_hex(immature.proofid)

        msg = msg_avaproof()
        msg.proof = immature
        peer.send_message(msg)
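        # A proof whose stake utxo does not yet have the required number of
        # confirmations (-avaproofstakeutxoconfirmations=2 above) is not
        # dropped: the check just below waits until the node reports it with
        # the "immature" status.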
wait_for_proof(node, immature_proofid, expect_status="immature") def test_ban_invalid_proof(self): node = self.nodes[0] _, bad_proof = self.generate_proof(node) bad_proof.stakes = [] privkey = node.get_deterministic_priv_key().key missing_stake = node.buildavalancheproof( 1, 0, privkey, [{ 'txid': '0' * 64, 'vout': 0, 'amount': 10000000, 'height': 42, 'iscoinbase': False, 'privatekey': privkey, }] ) self.restart_node( 0, ['-avaproofstakeutxodustthreshold=1000000']) peer = node.add_p2p_connection(P2PInterface()) msg = msg_avaproof() # Sending a proof with a missing utxo doesn't trigger a ban msg.proof = avalanche_proof_from_hex(missing_stake) with node.assert_debug_log(["received: avaproof"], ["Misbehaving"]): peer.send_message(msg) peer.sync_with_ping() msg.proof = bad_proof with node.assert_debug_log([ 'Misbehaving', 'invalid-proof', ]): peer.send_message(msg) peer.wait_for_disconnect() def test_proof_relay(self): # This test makes no sense with less than 2 nodes ! assert_greater_than(self.num_nodes, 2) proofs_keys = [self.generate_proof(self.nodes[0]) for _ in self.nodes] proofids = {proof_key[1].proofid for proof_key in proofs_keys} # generate_proof does not sync, so do it manually self.sync_blocks() def restart_nodes_with_proof(nodes, extra_args=None): for node in nodes: privkey, proof = proofs_keys[node.index] self.restart_node(node.index, self.extra_args[node.index] + [ f"-avaproof={proof.serialize().hex()}", f"-avamasterkey={bytes_to_wif(privkey.get_bytes())}" ] + (extra_args or [])) restart_nodes_with_proof(self.nodes[:-1]) chainwork = int(self.nodes[-1].getblockchaininfo()['chainwork'], 16) restart_nodes_with_proof( self.nodes[-1:], extra_args=[f'-minimumchainwork={chainwork + 100:#x}']) # Add an inbound so the node proof can be registered and advertised [node.add_p2p_connection(P2PInterface()) for node in self.nodes] [[self.connect_nodes(node.index, j) for j in range(node.index)] for node in self.nodes] # Connect a block to make the proofs added to our pool self.generate(self.nodes[0], 1, sync_fun=self.sync_blocks) self.log.info("Nodes should eventually get the proof from their peer") self.sync_proofs(self.nodes[:-1]) for node in self.nodes[:-1]: assert_equal(set(get_proof_ids(node)), proofids) assert self.nodes[-1].getblockchaininfo()['initialblockdownload'] self.log.info("Except the node that has not completed IBD") assert_equal(len(get_proof_ids(self.nodes[-1])), 1) # The same if we send a proof directly with no download request peer = AvaP2PInterface() self.nodes[-1].add_p2p_connection(peer) _, proof = self.generate_proof(self.nodes[0]) peer.send_avaproof(proof) peer.sync_send_with_ping() with p2p_lock: assert_equal(peer.message_count.get('getdata', 0), 0) # Leave the nodes in good shape for the next tests restart_nodes_with_proof(self.nodes) [[self.connect_nodes(node.index, j) for j in range(node.index)] for node in self.nodes] def test_manually_sent_proof(self): node0 = self.nodes[0] _, proof = self.generate_proof(node0) self.log.info( "Send a proof via RPC and check all the nodes download it") node0.sendavalancheproof(proof.serialize().hex()) self.sync_proofs() def test_unbroadcast(self): self.log.info("Test broadcasting proofs") node = self.nodes[0] # Disconnect the other nodes/peers, or they will request the proof and # invalidate the test [n.stop_node() for n in self.nodes[1:]] node.disconnect_p2ps() def add_peers(count): peers = [] for i in range(count): peer = node.add_p2p_connection(ProofInvStoreP2PInterface()) peer.wait_for_verack() peers.append(peer) return peers 
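Both the initial broadcast check and the reattempt checks that follow drive the node's scheduler forward with mockscheduler so that the 10-to-15-minute broadcast reattempt fires immediately. A minimal sketch of that idiom, assuming a node and peer list from this framework (the helper name is illustrative, not part of the test):

# Hypothetical helper showing the scheduler fast-forward idiom used below;
# MAX_INITIAL_BROADCAST_DELAY is the constant defined at the top of this file.
def trigger_proof_rebroadcast(node, peers):
    # Pretend the broadcast reattempt delay has fully elapsed ...
    node.mockscheduler(MAX_INITIAL_BROADCAST_DELAY + 1)
    # ... then round-trip a ping so any resulting inv has been delivered
    # before the caller inspects what the peers received.
    peers[-1].sync_with_ping()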
_, proof = self.generate_proof(node) proofid_hex = uint256_hex(proof.proofid) # Broadcast the proof peers = add_peers(3) assert node.sendavalancheproof(proof.serialize().hex()) wait_for_proof(node, proofid_hex) def proof_inv_received(peers): with p2p_lock: return all(p.last_message.get( "inv") and p.last_message["inv"].inv[-1].hash == proof.proofid for p in peers) self.wait_until(lambda: proof_inv_received(peers)) # If no peer request the proof for download, the node should reattempt # broadcasting to all new peers after 10 to 15 minutes. peers = add_peers(3) node.mockscheduler(MAX_INITIAL_BROADCAST_DELAY + 1) peers[-1].sync_with_ping() self.wait_until(lambda: proof_inv_received(peers)) # If at least one peer requests the proof, there is no more attempt to # broadcast it node.setmocktime(int(time.time()) + UNCONDITIONAL_RELAY_DELAY) msg = msg_getdata([CInv(t=MSG_AVA_PROOF, h=proof.proofid)]) peers[-1].send_message(msg) # Give enough time for the node to broadcast the proof again peers = add_peers(3) node.mockscheduler(MAX_INITIAL_BROADCAST_DELAY + 1) peers[-1].sync_with_ping() assert not proof_inv_received(peers) self.log.info( "Proofs that become invalid should no longer be broadcasted") # Restart and add connect a new set of peers self.restart_node(0) # Broadcast the proof peers = add_peers(3) assert node.sendavalancheproof(proof.serialize().hex()) self.wait_until(lambda: proof_inv_received(peers)) # Sanity check our node knows the proof, and it is valid wait_for_proof(node, proofid_hex) # Mature the utxo then spend it self.generate(node, 100, sync_fun=self.no_op) utxo = proof.stakes[0].stake.utxo raw_tx = node.createrawtransaction( inputs=[{ # coinbase - "txid": uint256_hex(utxo.hash), + "txid": uint256_hex(utxo.txid), "vout": utxo.n }], outputs={ADDRESS_ECREG_UNSPENDABLE: 25_000_000 - 250.00}, ) signed_tx = node.signrawtransactionwithkey( hexstring=raw_tx, privkeys=[node.get_deterministic_priv_key().key], ) node.sendrawtransaction(signed_tx['hex']) # Mine the tx in a block self.generate(node, 1, sync_fun=self.no_op) # Wait for the proof to be invalidated def check_proof_not_found(proofid): try: assert_raises_rpc_error(-8, "Proof not found", node.getrawavalancheproof, proofid) return True except BaseException: return False self.wait_until(lambda: check_proof_not_found(proofid_hex)) # It should no longer be broadcasted peers = add_peers(3) node.mockscheduler(MAX_INITIAL_BROADCAST_DELAY + 1) peers[-1].sync_with_ping() assert not proof_inv_received(peers) def run_test(self): self.test_send_proof_inv() self.test_receive_proof() self.test_proof_relay() self.test_manually_sent_proof() # Run these tests last because they need to disconnect the nodes self.test_unbroadcast() self.test_ban_invalid_proof() if __name__ == '__main__': ProofInventoryTest().main() diff --git a/test/functional/data/invalid_txs.py b/test/functional/data/invalid_txs.py index 7f8cd4062..14f8103d9 100644 --- a/test/functional/data/invalid_txs.py +++ b/test/functional/data/invalid_txs.py @@ -1,262 +1,262 @@ #!/usr/bin/env python3 # Copyright (c) 2015-2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """ Templates for constructing various sorts of invalid transactions. These templates (or an iterator over all of them) can be reused in different contexts to test using a number of invalid transaction types. 
Hopefully this makes it easier to get coverage of a full variety of tx validation checks through different interfaces (AcceptBlock, AcceptToMemPool, etc.) without repeating ourselves. Invalid tx cases not covered here can be found by running: $ diff \ <(grep -IREho "bad-txns[a-zA-Z-]+" src | sort -u) \ <(grep -IEho "bad-txns[a-zA-Z-]+" test/functional/data/invalid_txs.py | sort -u) """ import abc from typing import Optional from test_framework import script as sc from test_framework.blocktools import create_tx_with_script from test_framework.messages import MAX_MONEY, COutPoint, CTransaction, CTxIn, CTxOut from test_framework.script import ( OP_2DIV, OP_2MUL, OP_INVERT, OP_LSHIFT, OP_MUL, OP_RSHIFT, CScript, ) from test_framework.txtools import pad_tx basic_p2sh = sc.CScript( [sc.OP_HASH160, sc.hash160(sc.CScript([sc.OP_0])), sc.OP_EQUAL]) class BadTxTemplate: """Allows simple construction of a certain kind of invalid tx. Base class to be subclassed.""" __metaclass__ = abc.ABCMeta # The expected error code given by bitcoind upon submission of the tx. reject_reason: Optional[str] = "" # Only specified if it differs from mempool acceptance error. block_reject_reason = "" # Do we expect to be disconnected after submitting this tx? expect_disconnect = False # Is this tx considered valid when included in a block, but not for acceptance into # the mempool (i.e. does it violate policy but not consensus)? valid_in_block = False def __init__(self, *, spend_tx=None, spend_block=None): self.spend_tx = spend_block.vtx[0] if spend_block else spend_tx self.spend_avail = sum(o.nValue for o in self.spend_tx.vout) self.valid_txin = CTxIn( COutPoint( self.spend_tx.sha256, 0), b"", 0xffffffff) @abc.abstractmethod def get_tx(self, *args, **kwargs): """Return a CTransaction that is invalid per the subclass.""" pass class OutputMissing(BadTxTemplate): reject_reason = "bad-txns-vout-empty" expect_disconnect = True def get_tx(self): tx = CTransaction() tx.vin.append(self.valid_txin) tx.calc_sha256() return tx class InputMissing(BadTxTemplate): reject_reason = "bad-txns-vin-empty" expect_disconnect = True def get_tx(self): tx = CTransaction() tx.vout.append(CTxOut(0, sc.CScript([sc.OP_TRUE] * 100))) tx.calc_sha256() return tx class SizeTooSmall(BadTxTemplate): reject_reason = "bad-txns-undersize" expect_disconnect = False valid_in_block = True def get_tx(self): tx = CTransaction() tx.vin.append(self.valid_txin) tx.vout.append(CTxOut(0, sc.CScript([sc.OP_TRUE]))) tx.calc_sha256() return tx class BadInputOutpointIndex(BadTxTemplate): # Won't be rejected - nonexistent outpoint index is treated as an orphan since the coins # database can't distinguish between spent outpoints and outpoints which # never existed. 
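A new case follows the same contract as the templates above: declare the expected reject reason, whether the sending peer should be disconnected, and build the malformed transaction in get_tx(). A hypothetical subclass for illustration only, mirroring the SpendTooMuch template that appears further down in this file:

class ExampleSpendTooMuch(BadTxTemplate):  # hypothetical, not part of this file
    reject_reason = 'bad-txns-in-belowout'
    expect_disconnect = True

    def get_tx(self):
        # Create a single output worth more than the available input value.
        return create_tx_with_script(
            self.spend_tx, 0,
            script_pub_key=basic_p2sh,
            amount=self.spend_avail + 1)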
reject_reason = None expect_disconnect = False def get_tx(self): num_indices = len(self.spend_tx.vin) bad_idx = num_indices + 100 tx = CTransaction() tx.vin.append( CTxIn( COutPoint( self.spend_tx.sha256, bad_idx), b"", 0xffffffff)) tx.vout.append(CTxOut(0, basic_p2sh)) tx.calc_sha256() return tx class DuplicateInput(BadTxTemplate): reject_reason = 'bad-txns-inputs-duplicate' expect_disconnect = True def get_tx(self): tx = CTransaction() tx.vin.append(self.valid_txin) tx.vin.append(self.valid_txin) tx.vout.append(CTxOut(1, basic_p2sh)) tx.calc_sha256() return tx class PrevoutNullInput(BadTxTemplate): reject_reason = 'bad-txns-prevout-null' expect_disconnect = True def get_tx(self): tx = CTransaction() tx.vin.append(self.valid_txin) - tx.vin.append(CTxIn(COutPoint(hash=0, n=0xffffffff))) + tx.vin.append(CTxIn(COutPoint(txid=0, n=0xffffffff))) tx.vout.append(CTxOut(1, basic_p2sh)) tx.calc_sha256() return tx class NonexistentInput(BadTxTemplate): # Added as an orphan tx. reject_reason = None expect_disconnect = False def get_tx(self): tx = CTransaction() tx.vin.append( CTxIn( COutPoint( self.spend_tx.sha256 + 1, 0), b"", 0xffffffff)) tx.vin.append(self.valid_txin) tx.vout.append(CTxOut(1, basic_p2sh)) tx.calc_sha256() return tx class SpendTooMuch(BadTxTemplate): reject_reason = 'bad-txns-in-belowout' expect_disconnect = True def get_tx(self): return create_tx_with_script( self.spend_tx, 0, script_pub_key=basic_p2sh, amount=(self.spend_avail + 1)) class CreateNegative(BadTxTemplate): reject_reason = 'bad-txns-vout-negative' expect_disconnect = True def get_tx(self): return create_tx_with_script(self.spend_tx, 0, amount=-1) class CreateTooLarge(BadTxTemplate): reject_reason = 'bad-txns-vout-toolarge' expect_disconnect = True def get_tx(self): return create_tx_with_script(self.spend_tx, 0, amount=MAX_MONEY + 1) class CreateSumTooLarge(BadTxTemplate): reject_reason = 'bad-txns-txouttotal-toolarge' expect_disconnect = True def get_tx(self): tx = create_tx_with_script(self.spend_tx, 0, amount=MAX_MONEY) tx.vout = [tx.vout[0]] * 2 tx.calc_sha256() return tx class InvalidOPIFConstruction(BadTxTemplate): reject_reason = "mandatory-script-verify-flag-failed (Invalid OP_IF construction)" expect_disconnect = True valid_in_block = True def get_tx(self): return create_tx_with_script( self.spend_tx, 0, script_sig=b'\x64' * 35, amount=(self.spend_avail // 2)) def getDisabledOpcodeTemplate(opcode): """ Creates disabled opcode tx template class""" def get_tx(self): tx = CTransaction() vin = self.valid_txin vin.scriptSig = CScript([opcode]) tx.vin.append(vin) tx.vout.append(CTxOut(1, basic_p2sh)) pad_tx(tx) tx.calc_sha256() return tx return type(f"DisabledOpcode_{str(opcode)}", (BadTxTemplate,), { 'reject_reason': "disabled opcode", 'expect_disconnect': True, 'get_tx': get_tx, 'valid_in_block': True }) # Disabled opcode tx templates (CVE-2010-5137) DisabledOpcodeTemplates = [getDisabledOpcodeTemplate(opcode) for opcode in [ OP_INVERT, OP_2MUL, OP_2DIV, OP_MUL, OP_LSHIFT, OP_RSHIFT]] def iter_all_templates(): """Iterate through all bad transaction template types.""" return BadTxTemplate.__subclasses__() diff --git a/test/functional/feature_utxo_set_hash.py b/test/functional/feature_utxo_set_hash.py index 3b072abca..d6fe6971f 100755 --- a/test/functional/feature_utxo_set_hash.py +++ b/test/functional/feature_utxo_set_hash.py @@ -1,91 +1,91 @@ #!/usr/bin/env python3 # Copyright (c) 2020-2021 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or 
http://www.opensource.org/licenses/mit-license.php. """Test UTXO set hash value calculation in gettxoutsetinfo.""" import struct from test_framework.blocktools import create_transaction from test_framework.messages import CBlock, COutPoint, FromHex from test_framework.muhash import MuHash3072 from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal class UTXOSetHashTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 1 self.setup_clean_chain = True def skip_test_if_missing_module(self): self.skip_if_no_wallet() def test_deterministic_hash_results(self): self.log.info("Test deterministic UTXO set hash results") # These depend on the setup_clean_chain option, the chain loaded from # the cache assert_equal( self.nodes[0].gettxoutsetinfo()['hash_serialized'], "b32ec1dda5a53cd025b95387aad344a801825fe46a60ff952ce26528f01d3be8") assert_equal( self.nodes[0].gettxoutsetinfo("muhash")['muhash'], "dd5ad2a105c2d29495f577245c357409002329b9f4d6182c0af3dc2f462555c8") def test_muhash_implementation(self): self.log.info("Test MuHash implementation consistency") node = self.nodes[0] # Generate 100 blocks and remove the first since we plan to spend its # coinbase block_hashes = self.generate(node, 100) blocks = [ FromHex(CBlock(), node.getblock(block, False)) for block in block_hashes] spending = blocks.pop(0) # Create a spending transaction and mine a block which includes it tx = create_transaction( node, spending.vtx[0].rehash(), node.getnewaddress(), amount=49_000_000) txid = node.sendrawtransaction( hexstring=tx.serialize().hex(), maxfeerate=0) tx_block = self.generateblock(node, output=node.getnewaddress(), transactions=[txid]) blocks.append( FromHex(CBlock(), node.getblock(tx_block['hash'], False))) # Serialize the outputs that should be in the UTXO set and add them to # a MuHash object muhash = MuHash3072() for height, block in enumerate(blocks): # The Genesis block coinbase is not part of the UTXO set and we # spent the first mined block height += 2 for tx in block.vtx: for n, tx_out in enumerate(tx.vout): - coinbase = 1 if not tx.vin[0].prevout.hash else 0 + coinbase = 1 if not tx.vin[0].prevout.txid else 0 data = COutPoint(int(tx.rehash(), 16), n).serialize() data += struct.pack("1650 bytes) tx.vin[0].scriptSig = CScript([b'a' * 1648]) self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'scriptsig-size'}], rawtxs=[tx.serialize().hex()], ) tx = FromHex(CTransaction(), raw_tx_reference) output_p2sh_burn = CTxOut(nValue=540, scriptPubKey=CScript( [OP_HASH160, hash160(b'burn'), OP_EQUAL])) # Use enough outputs to make the tx too large for our policy num_scripts = 100000 // len(output_p2sh_burn.serialize()) tx.vout = [output_p2sh_burn] * num_scripts self.check_mempool_result( result_expected=[ {'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'tx-size'}], rawtxs=[ToHex(tx)], ) tx = FromHex(CTransaction(), raw_tx_reference) tx.vout[0] = output_p2sh_burn # Make output smaller, such that it is dust for our policy tx.vout[0].nValue -= 1 self.check_mempool_result( result_expected=[ {'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'dust'}], rawtxs=[ToHex(tx)], ) tx = FromHex(CTransaction(), raw_tx_reference) tx.vout[0].scriptPubKey = CScript([OP_RETURN, b'\xff']) tx.vout = [tx.vout[0]] * 2 self.check_mempool_result( result_expected=[ {'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'multi-op-return'}], rawtxs=[ToHex(tx)], ) self.log.info('A timelocked transaction') tx = 
FromHex(CTransaction(), raw_tx_reference) # Should be non-max, so locktime is not ignored tx.vin[0].nSequence -= 1 tx.nLockTime = node.getblockcount() + 1 self.check_mempool_result( result_expected=[ {'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'bad-txns-nonfinal'}], rawtxs=[ToHex(tx)], ) self.log.info('A transaction that is locked by BIP68 sequence logic') tx = FromHex(CTransaction(), raw_tx_reference) # We could include it in the second block mined from now, but not the # very next one tx.vin[0].nSequence = 2 # Can skip re-signing the tx because of early rejection self.check_mempool_result( result_expected=[{'txid': tx.rehash(), 'allowed': False, 'reject-reason': 'non-BIP68-final'}], rawtxs=[tx.serialize().hex()], maxfeerate=0, ) if __name__ == '__main__': MempoolAcceptanceTest().main() diff --git a/test/functional/mining_basic.py b/test/functional/mining_basic.py index 0e1f9ee4b..8e67e2378 100755 --- a/test/functional/mining_basic.py +++ b/test/functional/mining_basic.py @@ -1,285 +1,285 @@ #!/usr/bin/env python3 # Copyright (c) 2014-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test mining RPCs - getmininginfo - getblocktemplate proposal mode - submitblock""" import copy from decimal import Decimal from test_framework.blocktools import TIME_GENESIS_BLOCK, create_coinbase from test_framework.messages import BLOCK_HEADER_SIZE, CBlock, CBlockHeader from test_framework.p2p import P2PDataStore from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, assert_raises_rpc_error VERSIONBITS_TOP_BITS = 0x20000000 def assert_template(node, block, expect, rehash=True): if rehash: block.hashMerkleRoot = block.calc_merkle_root() rsp = node.getblocktemplate( template_request={ 'data': block.serialize().hex(), 'mode': 'proposal'}) assert_equal(rsp, expect) class MiningTest(BitcoinTestFramework): def set_test_params(self): self.num_nodes = 2 self.setup_clean_chain = True self.supports_cli = False def mine_chain(self): self.log.info('Create some old blocks') node = self.nodes[0] address = node.get_deterministic_priv_key().address for t in range(TIME_GENESIS_BLOCK, TIME_GENESIS_BLOCK + 200 * 600, 600): node.setmocktime(t) self.generatetoaddress(node, 1, address, sync_fun=self.no_op) mining_info = node.getmininginfo() assert_equal(mining_info['blocks'], 200) assert_equal(mining_info['currentblocktx'], 0) assert_equal(mining_info['currentblocksize'], 1000) self.log.info('test blockversion') self.restart_node( 0, extra_args=[f'-mocktime={t}', '-blockversion=1337']) self.connect_nodes(0, 1) assert_equal(1337, self.nodes[0].getblocktemplate()['version']) self.restart_node(0, extra_args=[f'-mocktime={t}']) self.connect_nodes(0, 1) assert_equal( VERSIONBITS_TOP_BITS, self.nodes[0].getblocktemplate()['version']) self.restart_node(0) self.connect_nodes(0, 1) def run_test(self): self.mine_chain() node = self.nodes[0] def assert_submitblock(block, result_str_1, result_str_2=None): block.solve() result_str_2 = result_str_2 or 'duplicate-invalid' assert_equal(result_str_1, node.submitblock( hexdata=block.serialize().hex())) assert_equal(result_str_2, node.submitblock( hexdata=block.serialize().hex())) self.log.info('getmininginfo') mining_info = node.getmininginfo() assert_equal(mining_info['blocks'], 200) assert_equal(mining_info['chain'], self.chain) assert 'currentblocktx' not in mining_info assert 'currentblocksize' not in 
mining_info assert_equal(mining_info['difficulty'], Decimal('4.656542373906925E-10')) assert_equal(mining_info['networkhashps'], Decimal('0.003333333333333334')) assert_equal(mining_info['pooledtx'], 0) # Mine a block to leave initial block download self.generatetoaddress( node, 1, node.get_deterministic_priv_key().address) tmpl = node.getblocktemplate() self.log.info("getblocktemplate: Test capability advertised") assert 'proposal' in tmpl['capabilities'] next_height = int(tmpl["height"]) coinbase_tx = create_coinbase(height=next_height) # sequence numbers must not be max for nLockTime to have effect coinbase_tx.vin[0].nSequence = 2 ** 32 - 2 coinbase_tx.rehash() block = CBlock() block.nVersion = tmpl["version"] block.hashPrevBlock = int(tmpl["previousblockhash"], 16) block.nTime = tmpl["curtime"] block.nBits = int(tmpl["bits"], 16) block.nNonce = 0 block.vtx = [coinbase_tx] self.log.info("getblocktemplate: Test valid block") assert_template(node, block, None) self.log.info("submitblock: Test block decode failure") assert_raises_rpc_error(-22, "Block decode failed", node.submitblock, block.serialize()[:-15].hex()) self.log.info( "getblocktemplate: Test bad input hash for coinbase transaction") bad_block = copy.deepcopy(block) - bad_block.vtx[0].vin[0].prevout.hash += 1 + bad_block.vtx[0].vin[0].prevout.txid += 1 bad_block.vtx[0].rehash() assert_template(node, bad_block, 'bad-cb-missing') self.log.info("submitblock: Test invalid coinbase transaction") assert_raises_rpc_error(-22, "Block does not start with a coinbase", node.submitblock, bad_block.serialize().hex()) self.log.info("getblocktemplate: Test truncated final transaction") assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, { 'data': block.serialize()[:-1].hex(), 'mode': 'proposal'}) self.log.info("getblocktemplate: Test duplicate transaction") bad_block = copy.deepcopy(block) bad_block.vtx.append(bad_block.vtx[0]) assert_template(node, bad_block, 'bad-txns-duplicate') assert_submitblock(bad_block, 'bad-txns-duplicate', 'bad-txns-duplicate') self.log.info("getblocktemplate: Test invalid transaction") bad_block = copy.deepcopy(block) bad_tx = copy.deepcopy(bad_block.vtx[0]) - bad_tx.vin[0].prevout.hash = 255 + bad_tx.vin[0].prevout.txid = 255 bad_tx.rehash() bad_block.vtx.append(bad_tx) assert_template(node, bad_block, 'bad-txns-inputs-missingorspent') assert_submitblock(bad_block, 'bad-txns-inputs-missingorspent') self.log.info("getblocktemplate: Test nonfinal transaction") bad_block = copy.deepcopy(block) bad_block.vtx[0].nLockTime = 2 ** 32 - 1 bad_block.vtx[0].rehash() assert_template(node, bad_block, 'bad-txns-nonfinal') assert_submitblock(bad_block, 'bad-txns-nonfinal') self.log.info("getblocktemplate: Test bad tx count") # The tx count is immediately after the block header bad_block_sn = bytearray(block.serialize()) assert_equal(bad_block_sn[BLOCK_HEADER_SIZE], 1) bad_block_sn[BLOCK_HEADER_SIZE] += 1 assert_raises_rpc_error(-22, "Block decode failed", node.getblocktemplate, { 'data': bad_block_sn.hex(), 'mode': 'proposal'}) self.log.info("getblocktemplate: Test bad bits") bad_block = copy.deepcopy(block) bad_block.nBits = 469762303 # impossible in the real world assert_template(node, bad_block, 'bad-diffbits') self.log.info("getblocktemplate: Test bad merkle root") bad_block = copy.deepcopy(block) bad_block.hashMerkleRoot += 1 assert_template(node, bad_block, 'bad-txnmrklroot', False) assert_submitblock(bad_block, 'bad-txnmrklroot', 'bad-txnmrklroot') self.log.info("getblocktemplate: Test bad 
timestamps") bad_block = copy.deepcopy(block) bad_block.nTime = 2 ** 31 - 1 assert_template(node, bad_block, 'time-too-new') assert_submitblock(bad_block, 'time-too-new', 'time-too-new') bad_block.nTime = 0 assert_template(node, bad_block, 'time-too-old') assert_submitblock(bad_block, 'time-too-old', 'time-too-old') self.log.info("getblocktemplate: Test not best block") bad_block = copy.deepcopy(block) bad_block.hashPrevBlock = 123 assert_template(node, bad_block, 'inconclusive-not-best-prevblk') assert_submitblock(bad_block, 'prev-blk-not-found', 'prev-blk-not-found') self.log.info('submitheader tests') assert_raises_rpc_error(-22, 'Block header decode failed', lambda: node.submitheader(hexdata='xx' * BLOCK_HEADER_SIZE)) assert_raises_rpc_error(-22, 'Block header decode failed', lambda: node.submitheader(hexdata='ff' * (BLOCK_HEADER_SIZE - 2))) assert_raises_rpc_error(-25, 'Must submit previous header', lambda: node.submitheader(hexdata=super(CBlock, bad_block).serialize().hex())) block.nTime += 1 block.solve() def chain_tip(b_hash, *, status='headers-only', branchlen=1): return {'hash': b_hash, 'height': 202, 'branchlen': branchlen, 'status': status} assert chain_tip(block.hash) not in node.getchaintips() node.submitheader(hexdata=block.serialize().hex()) assert chain_tip(block.hash) in node.getchaintips() # Noop node.submitheader(hexdata=CBlockHeader(block).serialize().hex()) assert chain_tip(block.hash) in node.getchaintips() bad_block_root = copy.deepcopy(block) bad_block_root.hashMerkleRoot += 2 bad_block_root.solve() assert chain_tip(bad_block_root.hash) not in node.getchaintips() node.submitheader(hexdata=CBlockHeader( bad_block_root).serialize().hex()) assert chain_tip(bad_block_root.hash) in node.getchaintips() # Should still reject invalid blocks, even if we have the header: assert_equal(node.submitblock( hexdata=bad_block_root.serialize().hex()), 'bad-txnmrklroot') assert_equal(node.submitblock( hexdata=bad_block_root.serialize().hex()), 'bad-txnmrklroot') assert chain_tip(bad_block_root.hash) in node.getchaintips() # We know the header for this invalid block, so should just return # early without error: node.submitheader(hexdata=CBlockHeader( bad_block_root).serialize().hex()) assert chain_tip(bad_block_root.hash) in node.getchaintips() bad_block_lock = copy.deepcopy(block) bad_block_lock.vtx[0].nLockTime = 2**32 - 1 bad_block_lock.vtx[0].rehash() bad_block_lock.hashMerkleRoot = bad_block_lock.calc_merkle_root() bad_block_lock.solve() assert_equal(node.submitblock( hexdata=bad_block_lock.serialize().hex()), 'bad-txns-nonfinal') assert_equal(node.submitblock( hexdata=bad_block_lock.serialize().hex()), 'duplicate-invalid') # Build a "good" block on top of the submitted bad block bad_block2 = copy.deepcopy(block) bad_block2.hashPrevBlock = bad_block_lock.sha256 bad_block2.solve() assert_raises_rpc_error(-25, 'bad-prevblk', lambda: node.submitheader( hexdata=CBlockHeader(bad_block2).serialize().hex())) # Should reject invalid header right away bad_block_time = copy.deepcopy(block) bad_block_time.nTime = 1 bad_block_time.solve() assert_raises_rpc_error(-25, 'time-too-old', lambda: node.submitheader( hexdata=CBlockHeader(bad_block_time).serialize().hex())) # Should ask for the block from a p2p node, if they announce the header # as well: peer = node.add_p2p_connection(P2PDataStore()) # Drop the first getheaders peer.wait_for_getheaders(timeout=5) peer.send_blocks_and_test(blocks=[block], node=node) # Must be active now: assert chain_tip(block.hash, status='active', branchlen=0) in 
node.getchaintips() # Building a few blocks should give the same results self.generatetoaddress( node, 10, node.get_deterministic_priv_key().address) assert_raises_rpc_error(-25, 'time-too-old', lambda: node.submitheader( hexdata=CBlockHeader(bad_block_time).serialize().hex())) assert_raises_rpc_error(-25, 'bad-prevblk', lambda: node.submitheader( hexdata=CBlockHeader(bad_block2).serialize().hex())) node.submitheader(hexdata=CBlockHeader(block).serialize().hex()) node.submitheader(hexdata=CBlockHeader( bad_block_root).serialize().hex()) # valid assert_equal(node.submitblock( hexdata=block.serialize().hex()), 'duplicate') # Sanity check that maxtries supports large integers self.generatetoaddress(node, 1, node.get_deterministic_priv_key().address, pow( 2, 32)) if __name__ == '__main__': MiningTest().main() diff --git a/test/functional/test_framework/messages.py b/test/functional/test_framework/messages.py index 7adbbd139..25fe1dcb3 100755 --- a/test/functional/test_framework/messages.py +++ b/test/functional/test_framework/messages.py @@ -1,2307 +1,2307 @@ #!/usr/bin/env python3 # Copyright (c) 2010 ArtForz -- public domain half-a-node # Copyright (c) 2012 Jeff Garzik # Copyright (c) 2010-2019 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Bitcoin test framework primitive and message structures CBlock, CTransaction, CBlockHeader, CTxIn, CTxOut, etc....: data structures that should map to corresponding structures in bitcoin/primitives msg_block, msg_tx, msg_headers, etc.: data structures that represent network messages ser_*, deser_*: functions that handle serialization/deserialization. Classes use __slots__ to ensure extraneous attributes aren't accidentally added by tests, compromising their intended effect. """ import copy import hashlib import random import socket import struct import time import unittest from base64 import b64decode, b64encode from enum import IntEnum from io import BytesIO from typing import List from test_framework.siphash import siphash256 from test_framework.util import assert_equal, uint256_hex MAX_LOCATOR_SZ = 101 MAX_BLOCK_BASE_SIZE = 1000000 MAX_BLOOM_FILTER_SIZE = 36000 MAX_BLOOM_HASH_FUNCS = 50 # 1,000,000 XEC in satoshis (legacy BCHA) COIN = 100000000 # 1 XEC in satoshis XEC = 100 MAX_MONEY = 21000000 * COIN # Maximum length of incoming protocol messages MAX_PROTOCOL_MESSAGE_LENGTH = 2 * 1024 * 1024 MAX_HEADERS_RESULTS = 2000 # Number of headers sent in one getheaders result MAX_INV_SIZE = 50000 # Maximum number of entries in an 'inv' protocol message NODE_NETWORK = (1 << 0) NODE_GETUTXO = (1 << 1) NODE_BLOOM = (1 << 2) # NODE_WITNESS = (1 << 3) # NODE_XTHIN = (1 << 4) # removed in v0.22.12 NODE_COMPACT_FILTERS = (1 << 6) NODE_NETWORK_LIMITED = (1 << 10) NODE_AVALANCHE = (1 << 24) MSG_TX = 1 MSG_BLOCK = 2 MSG_FILTERED_BLOCK = 3 MSG_CMPCT_BLOCK = 4 MSG_AVA_PROOF = 0x1f000001 MSG_TYPE_MASK = 0xffffffff >> 2 FILTER_TYPE_BASIC = 0 # Serialization/deserialization tools def sha256(s): return hashlib.new('sha256', s).digest() def hash256(s): return sha256(sha256(s)) def ser_compact_size(size): r = b"" if size < 253: r = struct.pack("B", size) elif size < 0x10000: r = struct.pack(">= 32 return rs def uint256_from_str(s): r = 0 t = struct.unpack("> 24) & 0xFF v = (c & 0xFFFFFF) << (8 * (nbytes - 3)) return v # deser_function_name: Allow for an alternate deserialization function on the # entries in the vector. 
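For reference, ser_compact_size and deser_compact_size implement Bitcoin's standard CompactSize encoding: lengths below 253 take a single byte, while larger values get a 0xfd, 0xfe or 0xff marker followed by a 16-, 32- or 64-bit little-endian integer. A minimal stand-alone sketch of the encoder, not this file's exact code:

import struct


def compact_size(size: int) -> bytes:
    # CompactSize encoding used for vector lengths and strings on the wire.
    if size < 253:
        return struct.pack("B", size)
    if size < 0x10000:
        return struct.pack("<BH", 253, size)
    if size < 0x100000000:
        return struct.pack("<BI", 254, size)
    return struct.pack("<BQ", 255, size)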
def deser_vector(f, c, deser_function_name=None): nit = deser_compact_size(f) r = [] for _ in range(nit): t = c() if deser_function_name: getattr(t, deser_function_name)(f) else: t.deserialize(f) r.append(t) return r # ser_function_name: Allow for an alternate serialization function on the # entries in the vector. def ser_vector(v, ser_function_name=None): r = ser_compact_size(len(v)) for i in v: if ser_function_name: r += getattr(i, ser_function_name)() else: r += i.serialize() return r def deser_uint256_vector(f): nit = deser_compact_size(f) r = [] for _ in range(nit): t = deser_uint256(f) r.append(t) return r def ser_uint256_vector(v): r = ser_compact_size(len(v)) for i in v: r += ser_uint256(i) return r def deser_string_vector(f): nit = deser_compact_size(f) r = [] for _ in range(nit): t = deser_string(f) r.append(t) return r def ser_string_vector(v): r = ser_compact_size(len(v)) for sv in v: r += ser_string(sv) return r def FromHex(obj, hex_string): """Deserialize from a hex string representation (eg from RPC)""" obj.deserialize(BytesIO(bytes.fromhex(hex_string))) return obj def ToHex(obj): """Convert a binary-serializable object to hex (eg for submission via RPC)""" return obj.serialize().hex() # Objects that map to bitcoind objects, which can be serialized/deserialized class CAddress: __slots__ = ("net", "ip", "nServices", "port", "time") # see https://github.com/bitcoin/bips/blob/master/bip-0155.mediawiki NET_IPV4 = 1 ADDRV2_NET_NAME = { NET_IPV4: "IPv4" } ADDRV2_ADDRESS_LENGTH = { NET_IPV4: 4 } def __init__(self): self.time = 0 self.nServices = 1 self.net = self.NET_IPV4 self.ip = "0.0.0.0" self.port = 0 def deserialize(self, f, *, with_time=True): """Deserialize from addrv1 format (pre-BIP155)""" if with_time: # VERSION messages serialize CAddress objects without time self.time = struct.unpack("H", f.read(2))[0] def serialize(self, *, with_time=True): """Serialize in addrv1 format (pre-BIP155)""" assert self.net == self.NET_IPV4 r = b"" if with_time: # VERSION messages serialize CAddress objects without time r += struct.pack("H", self.port) return r def deserialize_v2(self, f): """Deserialize from addrv2 format (BIP155)""" self.time = struct.unpack("H", f.read(2))[0] def serialize_v2(self): """Serialize in addrv2 format (BIP155)""" assert self.net == self.NET_IPV4 r = b"" r += struct.pack("H", self.port) return r def __repr__(self): return ( f"CAddress(nServices={self.nServices} net={self.ADDRV2_NET_NAME[self.net]} " f"addr={self.ip} port={self.port})" ) class CInv: __slots__ = ("hash", "type") typemap = { 0: "Error", MSG_TX: "TX", MSG_BLOCK: "Block", MSG_FILTERED_BLOCK: "filtered Block", MSG_CMPCT_BLOCK: "CompactBlock", MSG_AVA_PROOF: "avalanche proof", } def __init__(self, t=0, h=0): self.type = t self.hash = h def deserialize(self, f): self.type = struct.unpack(" MAX_MONEY: return False return True def __repr__(self): return ( f"CTransaction(nVersion={self.nVersion} vin={self.vin!r} " f"vout={self.vout!r} nLockTime={self.nLockTime})" ) class CBlockHeader: __slots__ = ("hash", "hashMerkleRoot", "hashPrevBlock", "nBits", "nNonce", "nTime", "nVersion", "sha256") def __init__(self, header=None): if header is None: self.set_null() else: self.nVersion = header.nVersion self.hashPrevBlock = header.hashPrevBlock self.hashMerkleRoot = header.hashMerkleRoot self.nTime = header.nTime self.nBits = header.nBits self.nNonce = header.nNonce self.sha256 = header.sha256 self.hash = header.hash self.calc_sha256() def set_null(self): self.nVersion = 1 self.hashPrevBlock = 0 self.hashMerkleRoot = 
0 self.nTime = 0 self.nBits = 0 self.nNonce = 0 self.sha256 = None self.hash = None def deserialize(self, f): self.nVersion = struct.unpack(" 1: newhashes = [] for i in range(0, len(hashes), 2): i2 = min(i + 1, len(hashes) - 1) newhashes.append(hash256(hashes[i] + hashes[i2])) hashes = newhashes return uint256_from_str(hashes[0]) def calc_merkle_root(self): hashes = [] for tx in self.vtx: tx.calc_sha256() hashes.append(ser_uint256(tx.sha256)) return self.get_merkle_root(hashes) def is_valid(self): self.calc_sha256() target = uint256_from_compact(self.nBits) if self.sha256 > target: return False for tx in self.vtx: if not tx.is_valid(): return False if self.calc_merkle_root() != self.hashMerkleRoot: return False return True def solve(self): self.rehash() target = uint256_from_compact(self.nBits) while self.sha256 > target: self.nNonce += 1 self.rehash() def __repr__(self): return ( f"CBlock(nVersion={self.nVersion} " f"hashPrevBlock={uint256_hex(self.hashPrevBlock)} " f"hashMerkleRoot={uint256_hex(self.hashMerkleRoot)} " f"nTime={self.nTime} nBits={self.nBits:08x} " f"nNonce={self.nNonce:08x} vtx={self.vtx!r})" ) class PrefilledTransaction: __slots__ = ("index", "tx") def __init__(self, index=0, tx=None): self.index = index self.tx = tx def deserialize(self, f): self.index = deser_compact_size(f) self.tx = CTransaction() self.tx.deserialize(f) def serialize(self): r = b"" r += ser_compact_size(self.index) r += self.tx.serialize() return r def __repr__(self): return f"PrefilledTransaction(index={self.index}, tx={self.tx!r})" # This is what we send on the wire, in a cmpctblock message. class P2PHeaderAndShortIDs: __slots__ = ("header", "nonce", "prefilled_txn", "prefilled_txn_length", "shortids", "shortids_length") def __init__(self): self.header = CBlockHeader() self.nonce = 0 self.shortids_length = 0 self.shortids = [] self.prefilled_txn_length = 0 self.prefilled_txn = [] def deserialize(self, f): self.header.deserialize(f) self.nonce = struct.unpack("> 1 self.pubkey = deser_string(f) def serialize(self) -> bytes: r = self.utxo.serialize() height_ser = self.height << 1 | int(self.is_coinbase) r += struct.pack(' bytes: return self.stake.serialize() + self.sig class AvalancheProof: __slots__ = ( "sequence", "expiration", "master", "stakes", "payout_script", "signature", "limited_proofid", "proofid") def __init__(self, sequence=0, expiration=0, master=b"", signed_stakes=None, payout_script=b"", signature=b""): self.sequence: int = sequence self.expiration: int = expiration self.master: bytes = master self.stakes: List[AvalancheSignedStake] = signed_stakes or [ AvalancheSignedStake()] self.payout_script = payout_script self.signature = signature self.limited_proofid: int = None self.proofid: int = None self.compute_proof_id() def compute_proof_id(self): """Compute Bitcoin's 256-bit hash (double SHA-256) of the serialized proof data. 
""" ss = struct.pack(" int: return uint256_from_str(hash256( ser_uint256(self.limited_proofid) + ser_string(self.proof_master))) def deserialize(self, f): self.limited_proofid = deser_uint256(f) self.proof_master = deser_string(f) self.levels = deser_vector(f, AvalancheDelegationLevel) self.proofid = self.compute_proofid() def serialize(self): r = b"" r += ser_uint256(self.limited_proofid) r += ser_string(self.proof_master) r += ser_vector(self.levels) return r def __repr__(self): return f"AvalancheDelegation(" \ f"limitedProofId={uint256_hex(self.limited_proofid)}, " \ f"proofMaster={self.proof_master.hex()}, " \ f"proofid={uint256_hex(self.proofid)}, " \ f"levels={self.levels})" def getid(self): h = ser_uint256(self.proofid) for level in self.levels: h = hash256(h + ser_string(level.pubkey)) return h class AvalancheHello: __slots__ = ("delegation", "sig") def __init__(self, delegation=AvalancheDelegation(), sig=b"\0" * 64): self.delegation = delegation self.sig = sig def deserialize(self, f): self.delegation.deserialize(f) self.sig = f.read(64) def serialize(self): r = b"" r += self.delegation.serialize() r += self.sig return r def __repr__(self): return f"AvalancheHello(delegation={self.delegation!r}, sig={self.sig})" def get_sighash(self, node): b = self.delegation.getid() b += struct.pack(" class msg_headers: __slots__ = ("headers",) msgtype = b"headers" def __init__(self, headers=None): self.headers = headers if headers is not None else [] def deserialize(self, f): # comment in bitcoind indicates these should be deserialized as blocks blocks = deser_vector(f, CBlock) for x in blocks: self.headers.append(CBlockHeader(x)) def serialize(self): blocks = [CBlock(x) for x in self.headers] return ser_vector(blocks) def __repr__(self): return f"msg_headers(headers={self.headers!r})" class msg_merkleblock: __slots__ = ("merkleblock",) msgtype = b"merkleblock" def __init__(self, merkleblock=None): if merkleblock is None: self.merkleblock = CMerkleBlock() else: self.merkleblock = merkleblock def deserialize(self, f): self.merkleblock.deserialize(f) def serialize(self): return self.merkleblock.serialize() def __repr__(self): return f"msg_merkleblock(merkleblock={self.merkleblock!r})" class msg_filterload: __slots__ = ("data", "nHashFuncs", "nTweak", "nFlags") msgtype = b"filterload" def __init__(self, data=b'00', nHashFuncs=0, nTweak=0, nFlags=0): self.data = data self.nHashFuncs = nHashFuncs self.nTweak = nTweak self.nFlags = nFlags def deserialize(self, f): self.data = deser_string(f) self.nHashFuncs = struct.unpack("