
diff --git a/qa/pull-tester/rpc-tests.sh b/qa/pull-tester/rpc-tests.sh
index 72a282bc00..5d581819da 100755
--- a/qa/pull-tester/rpc-tests.sh
+++ b/qa/pull-tester/rpc-tests.sh
@@ -1,83 +1,84 @@
#!/bin/bash
set -e
CURDIR=$(cd $(dirname "$0"); pwd)
# Get BUILDDIR and REAL_BITCOIND
. "${CURDIR}/tests-config.sh"
export BITCOINCLI=${BUILDDIR}/qa/pull-tester/run-bitcoin-cli
export BITCOIND=${REAL_BITCOIND}
if [ "x${EXEEXT}" = "x.exe" ]; then
echo "Win tests currently disabled"
exit 0
fi
#Run the tests
testScripts=(
'wallet.py'
'listtransactions.py'
'mempool_resurrect_test.py'
'txn_doublespend.py --mineblock'
'txn_clone.py'
'getchaintips.py'
'rawtransactions.py'
'rest.py'
'mempool_spendcoinbase.py'
'mempool_coinbase_spends.py'
'httpbasics.py'
'zapwallettxes.py'
'proxy_test.py'
'merkle_blocks.py'
'fundrawtransaction.py'
'signrawtransactions.py'
'walletbackup.py'
'nodehandling.py'
'reindex.py'
'decodescript.py'
+ 'p2p-fullblocktest.py'
);
testScriptsExt=(
'bipdersig-p2p.py'
'bipdersig.py'
'getblocktemplate_longpoll.py'
'getblocktemplate_proposals.py'
'txn_doublespend.py'
'txn_clone.py --mineblock'
'pruning.py'
'forknotify.py'
'invalidateblock.py'
'keypool.py'
'receivedby.py'
'rpcbind_test.py'
# 'script_test.py'
'smartfees.py'
'maxblocksinflight.py'
'invalidblockrequest.py'
# 'forknotify.py'
'p2p-acceptblock.py'
);
extArg="-extended"
passOn=${@#$extArg}
if [ "x${ENABLE_BITCOIND}${ENABLE_UTILS}${ENABLE_WALLET}" = "x111" ]; then
for (( i = 0; i < ${#testScripts[@]}; i++ ))
do
if [ -z "$1" ] || [ "${1:0:1}" == "-" ] || [ "$1" == "${testScripts[$i]}" ] || [ "$1.py" == "${testScripts[$i]}" ]
then
echo -e "Running testscript \033[1m${testScripts[$i]}...\033[0m"
${BUILDDIR}/qa/rpc-tests/${testScripts[$i]} --srcdir "${BUILDDIR}/src" ${passOn}
fi
done
for (( i = 0; i < ${#testScriptsExt[@]}; i++ ))
do
if [ "$1" == $extArg ] || [ "$1" == "${testScriptsExt[$i]}" ] || [ "$1.py" == "${testScriptsExt[$i]}" ]
then
echo -e "Running \033[1m2nd level\033[0m testscript \033[1m${testScriptsExt[$i]}...\033[0m"
${BUILDDIR}/qa/rpc-tests/${testScriptsExt[$i]} --srcdir "${BUILDDIR}/src" ${passOn}
fi
done
else
echo "No rpc tests to run. Wallet, utils, and bitcoind must all be enabled"
fi
diff --git a/qa/rpc-tests/README.md b/qa/rpc-tests/README.md
index cfda8fe91f..c6d1721282 100644
--- a/qa/rpc-tests/README.md
+++ b/qa/rpc-tests/README.md
@@ -1,51 +1,152 @@
-Regression tests of RPC interface
-=================================
+Regression tests
+================
### [python-bitcoinrpc](https://github.com/jgarzik/python-bitcoinrpc)
Git subtree of [https://github.com/jgarzik/python-bitcoinrpc](https://github.com/jgarzik/python-bitcoinrpc).
Changes to python-bitcoinrpc should be made upstream, and then
pulled here using git subtree.
### [test_framework/test_framework.py](test_framework/test_framework.py)
Base class for new regression tests.
### [test_framework/util.py](test_framework/util.py)
Generally useful functions.
+### [test_framework/mininode.py](test_framework/mininode.py)
+Basic code to support p2p connectivity to a bitcoind.
+
+### [test_framework/comptool.py](test_framework/comptool.py)
+Framework for comparison-tool-style p2p tests.
+
+### [test_framework/script.py](test_framework/script.py)
+Utilities for manipulating transaction scripts (originally from python-bitcoinlib).
+
+### [test_framework/blockstore.py](test_framework/blockstore.py)
+Implements disk-backed block and tx storage.
+
+### [test_framework/key.py](test_framework/key.py)
+Wrapper around OpenSSL EC_Key (originally from python-bitcoinlib).
+
+### [test_framework/bignum.py](test_framework/bignum.py)
+Helpers for script.py.
+
+### [test_framework/blocktools.py](test_framework/blocktools.py)
+Helper functions for creating blocks and transactions.
+
+
Notes
=====
You can run a single test by calling `qa/pull-tester/rpc-tests.sh <testname>`.
Run all possible tests with `qa/pull-tester/rpc-tests.sh -extended`.
Possible options:
```
-h, --help show this help message and exit
--nocleanup Leave bitcoinds and test.* datadir on exit or error
--noshutdown Don't stop bitcoinds after the test execution
--srcdir=SRCDIR Source directory containing bitcoind/bitcoin-cli (default:
../../src)
--tmpdir=TMPDIR Root directory for datadirs
--tracerpc Print out all RPC calls as they are made
```
If you set the environment variable `PYTHON_DEBUG=1` you will get some debug output (example: `PYTHON_DEBUG=1 qa/pull-tester/rpc-tests.sh wallet`).
A 200-block -regtest blockchain and wallets for four nodes
are created the first time a regression test is run and
are stored in the cache/ directory. Each node has 25 mature
blocks (25*50=1250 BTC) in its wallet.
After the first run, the cache/ blockchain and wallets are
copied into a temporary directory and used as the initial
test state.
If you get into a bad state, you should be able
to recover with:
```bash
rm -rf cache
killall bitcoind
```
+
+P2P test design notes
+---------------------
+
+## Mininode
+
+* ```mininode.py``` contains all the definitions for objects that pass
+over the network (```CBlock```, ```CTransaction```, etc., along with the network-level
+wrappers for them, ```msg_block```, ```msg_tx```, etc.).
+
+* P2P tests have two threads. One thread handles all network communication
+with the bitcoind(s) being tested (using python's asyncore package); the other
+implements the test logic.
+
+* ```NodeConn``` is the class used to connect to a bitcoind. If you implement
+a callback class that derives from ```NodeConnCB``` and pass that to the
+```NodeConn``` object, your code will receive the appropriate callbacks when
+events of interest arrive. NOTE: be sure to call
+```self.create_callback_map()``` in your derived class's ```__init__```
+method, so that the correct mappings are set up between p2p messages and your
+callback functions (see the sketch at the end of this section).
+
+* You can pass the same handler to multiple ```NodeConn```s if you like, or pass
+different ones to each -- whatever makes the most sense for your test.
+
+* Call ```NetworkThread().start()``` after all ```NodeConn``` objects are created to
+start the networking thread. (Continue with the test logic in your existing
+thread.)
+
+* RPC calls are available in p2p tests.
+
+* Mininode can be used to write free-form tests, where specific p2p-protocol
+behavior is tested. Examples: ```p2p-acceptblock.py```, ```maxblocksinflight.py```.
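+
+* A minimal sketch of this pattern (the ```ExampleNode``` class and the wiring
+shown in the trailing comments are hypothetical; the calls follow
+```p2p-acceptblock.py```):
+
+```python
+from test_framework.mininode import NodeConn, NodeConnCB, NetworkThread
+from test_framework.util import p2p_port
+
+class ExampleNode(NodeConnCB):
+    def __init__(self):
+        NodeConnCB.__init__(self)
+        self.create_callback_map()  # required: maps p2p messages to on_* callbacks
+        self.connection = None
+
+    def add_connection(self, conn):
+        self.connection = conn
+
+    def on_inv(self, conn, message):
+        # Called whenever the connected node announces new inventory.
+        print "received inv with %d entries" % len(message.inv)
+
+# Typical wiring inside a test's run_test(), where self.nodes[0] is the RPC
+# connection to the bitcoind under test:
+#   example_node = ExampleNode()
+#   conn = NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], example_node)
+#   example_node.add_connection(conn)
+#   NetworkThread().start()  # continue the test logic in this thread
+```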
+
+## Comptool
+
+* Testing framework for writing tests that compare the block/tx acceptance
+behavior of a bitcoind against 1 or more other bitcoind instances, or against
+known outcomes, or both.
+
+* Set the ```num_nodes``` variable (defined in ```ComparisonTestFramework```) to start up
+1 or more nodes. If using 1 node, then ```--testbinary``` can be used as a command line
+option to change the bitcoind binary used by the test. If using 2 or more nodes,
+then ```--refbinary``` can be optionally used to change the bitcoind that will be used
+on nodes 2 and up.
+
+* Implement a (generator) function called ```get_tests()``` which yields ```TestInstance```s.
+Each ```TestInstance``` consists of:
+ - a list of ```[object, outcome, hash]``` entries
+ * ```object``` is a ```CBlock```, ```CTransaction```, or
+ ```CBlockHeader```. ```CBlock```s and ```CTransaction```s are tested for
+ acceptance. ```CBlockHeader```s can be used so that the test runner can deliver
+ complete header chains when the bitcoind requests them, allowing tests
+ where blocks are delivered out of order but still processed by
+ headers-first bitcoinds.
+ * ```outcome``` is ```True```, ```False```, or ```None```. If ```True```
+ or ```False```, the tip is compared with the expected tip -- either the
+ block passed in, or the hash specified as the optional 3rd entry. If
+ ```None``` is specified, then the test will compare all the bitcoinds
+ being tested to see if they all agree on what the best tip is.
+ * ```hash``` is the block hash of the tip to compare against. Optional to
+ specify; if left out then the hash of the block passed in will be used as
+ the expected tip. This allows for specifying an expected tip while testing
+ the handling of either invalid blocks or blocks delivered out of order,
+ which complete a longer chain.
+ - ```sync_every_block```: ```True/False```. If ```False```, then all blocks
+ are inv'ed together, and the test runner waits until the node receives the
+ last one, and tests only the last block for tip acceptance using the
+ outcome and specified tip. If ```True```, then each block is tested in
+ sequence and synced (this is slower when processing many blocks).
+ - ```sync_every_transaction```: ```True/False```. Analogous to
+ ```sync_every_block```, except if the outcome on the last tx is "None",
+ then the contents of the entire mempool are compared across all bitcoind
+ connections. If ```True``` or ```False```, then only the last tx's
+ acceptance is tested against the given outcome.
+
+* For examples of tests written in this framework, see
+ ```invalidblockrequest.py``` and ```p2p-fullblocktest.py```.
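+
+* A minimal sketch of a comptool-style test (the class name
+```ExampleComparisonTest``` is hypothetical; the structure mirrors
+```invalidblockrequest.py``` and assumes a fresh regtest chain):
+
+```python
+from test_framework.test_framework import ComparisonTestFramework
+from test_framework.comptool import TestManager, TestInstance
+from test_framework.mininode import NetworkThread
+from test_framework.blocktools import create_block, create_coinbase
+import time
+
+class ExampleComparisonTest(ComparisonTestFramework):
+    def __init__(self):
+        self.num_nodes = 1
+
+    def run_test(self):
+        test = TestManager(self, self.options.tmpdir)
+        test.add_all_connections(self.nodes)
+        NetworkThread().start()  # network handling runs in another thread
+        test.run()
+
+    def get_tests(self):
+        # Build one block on the current tip (height 1 on a fresh chain)
+        # and expect the node to accept it as the new tip.
+        tip = int("0x" + self.nodes[0].getbestblockhash() + "L", 0)
+        block = create_block(tip, create_coinbase(1), int(time.time()) + 1)
+        block.solve()
+        yield TestInstance([[block, True]])
+
+if __name__ == '__main__':
+    ExampleComparisonTest().main()
+```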
+
diff --git a/qa/rpc-tests/bipdersig-p2p.py b/qa/rpc-tests/bipdersig-p2p.py
index 41717377b2..ec1678cc2c 100755
--- a/qa/rpc-tests/bipdersig-p2p.py
+++ b/qa/rpc-tests/bipdersig-p2p.py
@@ -1,183 +1,190 @@
#!/usr/bin/env python2
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
from test_framework.test_framework import ComparisonTestFramework
from test_framework.util import *
from test_framework.mininode import CTransaction, NetworkThread
from test_framework.blocktools import create_coinbase, create_block
from test_framework.comptool import TestInstance, TestManager
from test_framework.script import CScript
from binascii import hexlify, unhexlify
import cStringIO
import time
# A canonical signature consists of:
# <30> <total len> <02> <len R> <R> <02> <len S> <S> <hashtype>
def unDERify(tx):
'''
Make the signature in vin 0 of a tx non-DER-compliant,
by adding padding after the S-value.
'''
scriptSig = CScript(tx.vin[0].scriptSig)
newscript = []
for i in scriptSig:
if (len(newscript) == 0):
newscript.append(i[0:-1] + '\0' + i[-1])
else:
newscript.append(i)
tx.vin[0].scriptSig = CScript(newscript)
'''
This test is meant to exercise BIP66 (DER SIG).
Connect to a single node.
Mine 2 (version 2) blocks (save the coinbases for later).
Generate 98 more version 2 blocks, verify the node accepts.
Mine 749 version 3 blocks, verify the node accepts.
Check that the new DERSIG rules are not enforced on the 750th version 3 block.
Check that the new DERSIG rules are enforced on the 751st version 3 block.
Mine 199 new version blocks.
Mine 1 old-version block.
Mine 1 new version block.
Mine 1 old version block, see that the node rejects.
'''
class BIP66Test(ComparisonTestFramework):
def __init__(self):
self.num_nodes = 1
def setup_network(self):
# Must set the blockversion for this test
self.nodes = start_nodes(1, self.options.tmpdir,
extra_args=[['-debug', '-whitelist=127.0.0.1', '-blockversion=2']],
binary=[self.options.testbinary])
def run_test(self):
test = TestManager(self, self.options.tmpdir)
test.add_all_connections(self.nodes)
NetworkThread().start() # Start up network handling in another thread
test.run()
def create_transaction(self, node, coinbase, to_address, amount):
from_txid = node.getblock(coinbase)['tx'][0]
inputs = [{ "txid" : from_txid, "vout" : 0}]
outputs = { to_address : amount }
rawtx = node.createrawtransaction(inputs, outputs)
signresult = node.signrawtransaction(rawtx)
tx = CTransaction()
f = cStringIO.StringIO(unhexlify(signresult['hex']))
tx.deserialize(f)
return tx
def get_tests(self):
self.coinbase_blocks = self.nodes[0].generate(2)
+ height = 3 # height of the next block to build
self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
self.nodeaddress = self.nodes[0].getnewaddress()
self.last_block_time = time.time()
''' 98 more version 2 blocks '''
test_blocks = []
for i in xrange(98):
- block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1)
+ block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
block.nVersion = 2
block.rehash()
block.solve()
test_blocks.append([block, True])
self.last_block_time += 1
self.tip = block.sha256
+ height += 1
yield TestInstance(test_blocks, sync_every_block=False)
''' Mine 749 version 3 blocks '''
test_blocks = []
for i in xrange(749):
- block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1)
+ block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
block.nVersion = 3
block.rehash()
block.solve()
test_blocks.append([block, True])
self.last_block_time += 1
self.tip = block.sha256
+ height += 1
yield TestInstance(test_blocks, sync_every_block=False)
'''
Check that the new DERSIG rules are not enforced in the 750th
version 3 block.
'''
spendtx = self.create_transaction(self.nodes[0],
self.coinbase_blocks[0], self.nodeaddress, 1.0)
unDERify(spendtx)
spendtx.rehash()
- block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1)
+ block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
block.nVersion = 3
block.vtx.append(spendtx)
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
block.solve()
self.last_block_time += 1
self.tip = block.sha256
+ height += 1
yield TestInstance([[block, True]])
'''
Check that the new DERSIG rules are enforced in the 751st version 3
block.
'''
spendtx = self.create_transaction(self.nodes[0],
self.coinbase_blocks[1], self.nodeaddress, 1.0)
unDERify(spendtx)
spendtx.rehash()
- block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
+ block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
block.nVersion = 3
block.vtx.append(spendtx)
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
block.solve()
self.last_block_time += 1
yield TestInstance([[block, False]])
''' Mine 199 new version blocks on last valid tip '''
test_blocks = []
for i in xrange(199):
- block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
+ block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
block.nVersion = 3
block.rehash()
block.solve()
test_blocks.append([block, True])
self.last_block_time += 1
self.tip = block.sha256
+ height += 1
yield TestInstance(test_blocks, sync_every_block=False)
''' Mine 1 old version block '''
- block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
+ block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
block.nVersion = 2
block.rehash()
block.solve()
self.last_block_time += 1
self.tip = block.sha256
+ height += 1
yield TestInstance([[block, True]])
''' Mine 1 new version block '''
- block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
+ block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
block.nVersion = 3
block.rehash()
block.solve()
self.last_block_time += 1
self.tip = block.sha256
+ height += 1
yield TestInstance([[block, True]])
''' Mine 1 old version block, should be invalid '''
- block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
+ block = create_block(self.tip, create_coinbase(height), self.last_block_time + 1)
block.nVersion = 2
block.rehash()
block.solve()
self.last_block_time += 1
yield TestInstance([[block, False]])
if __name__ == '__main__':
BIP66Test().main()
diff --git a/qa/rpc-tests/invalidblockrequest.py b/qa/rpc-tests/invalidblockrequest.py
index 64b8e26395..6a7980cd45 100755
--- a/qa/rpc-tests/invalidblockrequest.py
+++ b/qa/rpc-tests/invalidblockrequest.py
@@ -1,115 +1,119 @@
#!/usr/bin/env python2
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
from test_framework.test_framework import ComparisonTestFramework
from test_framework.util import *
from test_framework.comptool import TestManager, TestInstance
from test_framework.mininode import *
from test_framework.blocktools import *
import logging
import copy
import time
'''
In this test we connect to one node over p2p, and test block requests:
1) Valid blocks should be requested and become chain tip.
2) Invalid block with duplicated transaction should be re-requested.
3) Invalid block with bad coinbase value should be rejected and not
re-requested.
'''
# Use the ComparisonTestFramework with 1 node: only use --testbinary.
class InvalidBlockRequestTest(ComparisonTestFramework):
''' This test can be run with one node (checking expected answers) or with two nodes,
comparing their behavior. To only do the comparison, set the "outcome" of each TestInstance to None. '''
def __init__(self):
self.num_nodes = 1
def run_test(self):
test = TestManager(self, self.options.tmpdir)
test.add_all_connections(self.nodes)
self.tip = None
self.block_time = None
NetworkThread().start() # Start up network handling in another thread
test.run()
def get_tests(self):
if self.tip is None:
self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
self.block_time = int(time.time())+1
'''
Create a new block with an anyone-can-spend coinbase
'''
- block = create_block(self.tip, create_coinbase(), self.block_time)
+ height = 1
+ block = create_block(self.tip, create_coinbase(height), self.block_time)
self.block_time += 1
block.solve()
# Save the coinbase for later
self.block1 = block
self.tip = block.sha256
+ height += 1
yield TestInstance([[block, True]])
'''
Now we need that block to mature so we can spend the coinbase.
'''
test = TestInstance(sync_every_block=False)
for i in xrange(100):
- block = create_block(self.tip, create_coinbase(), self.block_time)
+ block = create_block(self.tip, create_coinbase(height), self.block_time)
block.solve()
self.tip = block.sha256
self.block_time += 1
test.blocks_and_transactions.append([block, True])
+ height += 1
yield test
'''
Now we use merkle-root malleability to generate an invalid block with
same blockheader.
Manufacture a block with 3 transactions (coinbase, spend of prior
coinbase, spend of that spend). Duplicate the 3rd transaction to
leave merkle root and blockheader unchanged but invalidate the block.
'''
- block2 = create_block(self.tip, create_coinbase(), self.block_time)
+ block2 = create_block(self.tip, create_coinbase(height), self.block_time)
self.block_time += 1
# chr(81) is OP_TRUE
tx1 = create_transaction(self.block1.vtx[0], 0, chr(81), 50*100000000)
tx2 = create_transaction(tx1, 0, chr(81), 50*100000000)
block2.vtx.extend([tx1, tx2])
block2.hashMerkleRoot = block2.calc_merkle_root()
block2.rehash()
block2.solve()
orig_hash = block2.sha256
block2_orig = copy.deepcopy(block2)
# Mutate block 2
block2.vtx.append(tx2)
assert_equal(block2.hashMerkleRoot, block2.calc_merkle_root())
assert_equal(orig_hash, block2.rehash())
assert(block2_orig.vtx != block2.vtx)
self.tip = block2.sha256
yield TestInstance([[block2, False], [block2_orig, True]])
+ height += 1
'''
Make sure that a totally screwed up block is not valid.
'''
- block3 = create_block(self.tip, create_coinbase(), self.block_time)
+ block3 = create_block(self.tip, create_coinbase(height), self.block_time)
self.block_time += 1
block3.vtx[0].vout[0].nValue = 100*100000000 # Too high!
block3.vtx[0].sha256=None
block3.vtx[0].calc_sha256()
block3.hashMerkleRoot = block3.calc_merkle_root()
block3.rehash()
block3.solve()
yield TestInstance([[block3, False]])
if __name__ == '__main__':
InvalidBlockRequestTest().main()
diff --git a/qa/rpc-tests/p2p-acceptblock.py b/qa/rpc-tests/p2p-acceptblock.py
index 83c03eeb78..700deab207 100755
--- a/qa/rpc-tests/p2p-acceptblock.py
+++ b/qa/rpc-tests/p2p-acceptblock.py
@@ -1,291 +1,291 @@
#!/usr/bin/env python2
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
import time
from test_framework.blocktools import create_block, create_coinbase
'''
AcceptBlockTest -- test processing of unrequested blocks.
Since behavior differs when receiving unrequested blocks from whitelisted peers
versus non-whitelisted peers, this tests the behavior of both (effectively two
separate tests running in parallel).
Setup: two nodes, node0 and node1, not connected to each other. Node0 does not
whitelist localhost, but node1 does. They will each be on their own chain for
this test.
We have one NodeConn connection to each, test_node and white_node respectively.
The test:
1. Generate one block on each node, to leave IBD.
2. Mine a new block on each tip, and deliver to each node from node's peer.
The tip should advance.
3. Mine a block that forks the previous block, and deliver to each node from
corresponding peer.
Node0 should not process this block (just accept the header), because it is
unrequested and doesn't have more work than the tip.
Node1 should process because this is coming from a whitelisted peer.
4. Send another block that builds on the forking block.
Node0 should process this block but be stuck on the shorter chain, because
it's missing an intermediate block.
Node1 should reorg to this longer chain.
4b. Send 288 more blocks on the longer chain.
Node0 should process all but the last block (too far ahead in height).
Send all headers to Node1, and then send the last block in that chain.
Node1 should accept the block because it's coming from a whitelisted peer.
5. Send a duplicate of the block in #3 to Node0.
Node0 should not process the block because it is unrequested, and stay on
the shorter chain.
6. Send Node0 an inv for the height 3 block produced in #4 above.
Node0 should figure out that it is missing the height 2 block and send a
getdata.
7. Send Node0 the missing block again.
Node0 should process and the tip should advance.
'''
# TestNode: bare-bones "peer". Used mostly as a conduit for a test to send
# p2p messages to a node; the messages are generated in the main testing logic.
class TestNode(NodeConnCB):
def __init__(self):
NodeConnCB.__init__(self)
self.create_callback_map()
self.connection = None
self.ping_counter = 1
self.last_pong = msg_pong()
def add_connection(self, conn):
self.connection = conn
# Track the last getdata message we receive (used in the test)
def on_getdata(self, conn, message):
self.last_getdata = message
# Spin until verack message is received from the node.
# We use this to signal that our test can begin. This
# is called from the testing thread, so it needs to acquire
# the global lock.
def wait_for_verack(self):
while True:
with mininode_lock:
if self.verack_received:
return
time.sleep(0.05)
# Wrapper for the NodeConn's send_message function
def send_message(self, message):
self.connection.send_message(message)
def on_pong(self, conn, message):
self.last_pong = message
# Sync up with the node after delivery of a block
def sync_with_ping(self, timeout=30):
self.connection.send_message(msg_ping(nonce=self.ping_counter))
received_pong = False
sleep_time = 0.05
while not received_pong and timeout > 0:
time.sleep(sleep_time)
timeout -= sleep_time
with mininode_lock:
if self.last_pong.nonce == self.ping_counter:
received_pong = True
self.ping_counter += 1
return received_pong
class AcceptBlockTest(BitcoinTestFramework):
def add_options(self, parser):
parser.add_option("--testbinary", dest="testbinary",
default=os.getenv("BITCOIND", "bitcoind"),
help="bitcoind binary to test")
def setup_chain(self):
initialize_chain_clean(self.options.tmpdir, 2)
def setup_network(self):
# Node0 will be used to test behavior of processing unrequested blocks
# from peers which are not whitelisted, while Node1 will be used for
# the whitelisted case.
self.nodes = []
self.nodes.append(start_node(0, self.options.tmpdir, ["-debug"],
binary=self.options.testbinary))
self.nodes.append(start_node(1, self.options.tmpdir,
["-debug", "-whitelist=127.0.0.1"],
binary=self.options.testbinary))
def run_test(self):
# Setup the p2p connections and start up the network thread.
test_node = TestNode() # connects to node0 (not whitelisted)
white_node = TestNode() # connects to node1 (whitelisted)
connections = []
connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], test_node))
connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1], white_node))
test_node.add_connection(connections[0])
white_node.add_connection(connections[1])
NetworkThread().start() # Start up network handling in another thread
# Test logic begins here
test_node.wait_for_verack()
white_node.wait_for_verack()
# 1. Have both nodes mine a block (leave IBD)
[ n.generate(1) for n in self.nodes ]
tips = [ int ("0x" + n.getbestblockhash() + "L", 0) for n in self.nodes ]
# 2. Send one block that builds on each tip.
# This should be accepted.
blocks_h2 = [] # the height 2 blocks on each node's chain
block_time = time.time() + 1
for i in xrange(2):
- blocks_h2.append(create_block(tips[i], create_coinbase(), block_time))
+ blocks_h2.append(create_block(tips[i], create_coinbase(2), block_time))
blocks_h2[i].solve()
block_time += 1
test_node.send_message(msg_block(blocks_h2[0]))
white_node.send_message(msg_block(blocks_h2[1]))
[ x.sync_with_ping() for x in [test_node, white_node] ]
assert_equal(self.nodes[0].getblockcount(), 2)
assert_equal(self.nodes[1].getblockcount(), 2)
print "First height 2 block accepted by both nodes"
# 3. Send another block that builds on the original tip.
blocks_h2f = [] # Blocks at height 2 that fork off the main chain
for i in xrange(2):
- blocks_h2f.append(create_block(tips[i], create_coinbase(), blocks_h2[i].nTime+1))
+ blocks_h2f.append(create_block(tips[i], create_coinbase(2), blocks_h2[i].nTime+1))
blocks_h2f[i].solve()
test_node.send_message(msg_block(blocks_h2f[0]))
white_node.send_message(msg_block(blocks_h2f[1]))
[ x.sync_with_ping() for x in [test_node, white_node] ]
for x in self.nodes[0].getchaintips():
if x['hash'] == blocks_h2f[0].hash:
assert_equal(x['status'], "headers-only")
for x in self.nodes[1].getchaintips():
if x['hash'] == blocks_h2f[1].hash:
assert_equal(x['status'], "valid-headers")
print "Second height 2 block accepted only from whitelisted peer"
# 4. Now send another block that builds on the forking chain.
blocks_h3 = []
for i in xrange(2):
- blocks_h3.append(create_block(blocks_h2f[i].sha256, create_coinbase(), blocks_h2f[i].nTime+1))
+ blocks_h3.append(create_block(blocks_h2f[i].sha256, create_coinbase(3), blocks_h2f[i].nTime+1))
blocks_h3[i].solve()
test_node.send_message(msg_block(blocks_h3[0]))
white_node.send_message(msg_block(blocks_h3[1]))
[ x.sync_with_ping() for x in [test_node, white_node] ]
# Since the earlier block was not processed by node0, the new block
# can't be fully validated.
for x in self.nodes[0].getchaintips():
if x['hash'] == blocks_h3[0].hash:
assert_equal(x['status'], "headers-only")
# But this block should be accepted by node0 since it has more work.
try:
self.nodes[0].getblock(blocks_h3[0].hash)
print "Unrequested more-work block accepted from non-whitelisted peer"
except:
raise AssertionError("Unrequested more work block was not processed")
# Node1 should have accepted and reorged.
assert_equal(self.nodes[1].getblockcount(), 3)
print "Successfully reorged to length 3 chain from whitelisted peer"
# 4b. Now mine 288 more blocks and deliver; all should be processed but
# the last (height-too-high) on node0. Node1 should process the tip if
# we give it the headers chain leading to the tip.
tips = blocks_h3
headers_message = msg_headers()
all_blocks = [] # node0's blocks
for j in xrange(2):
for i in xrange(288):
- next_block = create_block(tips[j].sha256, create_coinbase(), tips[j].nTime+1)
+ next_block = create_block(tips[j].sha256, create_coinbase(i + 4), tips[j].nTime+1)
next_block.solve()
if j==0:
test_node.send_message(msg_block(next_block))
all_blocks.append(next_block)
else:
headers_message.headers.append(CBlockHeader(next_block))
tips[j] = next_block
time.sleep(2)
for x in all_blocks:
try:
self.nodes[0].getblock(x.hash)
if x == all_blocks[287]:
raise AssertionError("Unrequested block too far-ahead should have been ignored")
except:
if x == all_blocks[287]:
print "Unrequested block too far-ahead not processed"
else:
raise AssertionError("Unrequested block with more work should have been accepted")
headers_message.headers.pop() # Ensure the last block is unrequested
white_node.send_message(headers_message) # Send headers leading to tip
white_node.send_message(msg_block(tips[1])) # Now deliver the tip
try:
white_node.sync_with_ping()
self.nodes[1].getblock(tips[1].hash)
print "Unrequested block far ahead of tip accepted from whitelisted peer"
except:
raise AssertionError("Unrequested block from whitelisted peer not accepted")
# 5. Test handling of unrequested block on the node that didn't process
# Should still not be processed (even though it has a child that has more
# work).
test_node.send_message(msg_block(blocks_h2f[0]))
# Here, if the sleep is too short, the test could falsely succeed (if the
# node hasn't processed the block by the time the sleep returns, and then
# the node processes it and incorrectly advances the tip).
# But this would be caught later on, when we verify that an inv triggers
# a getdata request for this block.
test_node.sync_with_ping()
assert_equal(self.nodes[0].getblockcount(), 2)
print "Unrequested block that would complete more-work chain was ignored"
# 6. Try to get node to request the missing block.
# Poke the node with an inv for block at height 3 and see if that
# triggers a getdata on block 2 (it should if block 2 is missing).
with mininode_lock:
# Clear state so we can check the getdata request
test_node.last_getdata = None
test_node.send_message(msg_inv([CInv(2, blocks_h3[0].sha256)]))
test_node.sync_with_ping()
with mininode_lock:
getdata = test_node.last_getdata
# Check that the getdata includes the right block
assert_equal(getdata.inv[0].hash, blocks_h2f[0].sha256)
print "Inv at tip triggered getdata for unprocessed block"
# 7. Send the missing block for the third time (now it is requested)
test_node.send_message(msg_block(blocks_h2f[0]))
test_node.sync_with_ping()
assert_equal(self.nodes[0].getblockcount(), 290)
print "Successfully reorged to longer chain from non-whitelisted peer"
[ c.disconnect_node() for c in connections ]
if __name__ == '__main__':
AcceptBlockTest().main()
diff --git a/qa/rpc-tests/p2p-fullblocktest.py b/qa/rpc-tests/p2p-fullblocktest.py
new file mode 100755
index 0000000000..9555940cec
--- /dev/null
+++ b/qa/rpc-tests/p2p-fullblocktest.py
@@ -0,0 +1,272 @@
+#!/usr/bin/env python2
+
+#
+# Distributed under the MIT/X11 software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+#
+
+from test_framework.test_framework import ComparisonTestFramework
+from test_framework.util import *
+from test_framework.comptool import TestManager, TestInstance
+from test_framework.mininode import *
+from test_framework.blocktools import *
+import logging
+import copy
+import time
+import numbers
+from test_framework.key import CECKey
+from test_framework.script import CScript, CScriptOp, SignatureHash, SIGHASH_ALL, OP_TRUE
+
+class PreviousSpendableOutput(object):
+ def __init__(self, tx = CTransaction(), n = -1):
+ self.tx = tx
+ self.n = n # the output we're spending
+
+'''
+This reimplements tests from the bitcoinj/FullBlockTestGenerator used
+by the pull-tester.
+
+We use the testing framework in which we expect a particular answer from
+each test.
+'''
+
+class FullBlockTest(ComparisonTestFramework):
+
+ ''' This test can be run with one node (checking expected answers) or with two nodes,
+ comparing their behavior. To only do the comparison, set the "outcome" of each TestInstance to None. '''
+ def __init__(self):
+ self.num_nodes = 1
+ self.block_heights = {}
+ self.coinbase_key = CECKey()
+ self.coinbase_key.set_secretbytes(bytes("horsebattery"))
+ self.coinbase_pubkey = self.coinbase_key.get_pubkey()
+ self.block_time = int(time.time())+1
+ self.tip = None
+ self.blocks = {}
+
+ def run_test(self):
+ test = TestManager(self, self.options.tmpdir)
+ test.add_all_connections(self.nodes)
+ NetworkThread().start() # Start up network handling in another thread
+ test.run()
+
+ def add_transactions_to_block(self, block, tx_list):
+ [ tx.rehash() for tx in tx_list ]
+ block.vtx.extend(tx_list)
+ block.hashMerkleRoot = block.calc_merkle_root()
+ block.rehash()
+ return block
+
+ # Create a block on top of self.tip, and advance self.tip to point to the new block.
+ # If spend is specified, then 1 satoshi will be spent from that output to an
+ # anyone-can-spend output, and the rest will go to fees.
+ def next_block(self, number, spend=None, additional_coinbase_value=0, script=None):
+ if self.tip == None:
+ base_block_hash = self.genesis_hash
+ else:
+ base_block_hash = self.tip.sha256
+ # First create the coinbase
+ height = self.block_heights[base_block_hash] + 1
+ coinbase = create_coinbase(height, self.coinbase_pubkey)
+ coinbase.vout[0].nValue += additional_coinbase_value
+ if (spend != None):
+ coinbase.vout[0].nValue += spend.tx.vout[spend.n].nValue - 1 # all but one satoshi to fees
+ coinbase.rehash()
+ block = create_block(base_block_hash, coinbase, self.block_time)
+ if (spend != None):
+ tx = CTransaction()
+ tx.vin.append(CTxIn(COutPoint(spend.tx.sha256, spend.n), "", 0xffffffff)) # no signature yet
+ # This copies the java comparison tool testing behavior: the first
+ # txout has a garbage scriptPubKey, "to make sure we're not
+ # pre-verifying too much" (?)
+ tx.vout.append(CTxOut(0, CScript([random.randint(0,255), height & 255])))
+ if script == None:
+ tx.vout.append(CTxOut(1, CScript([OP_TRUE])))
+ else:
+ tx.vout.append(CTxOut(1, script))
+ # Now sign it if necessary
+ scriptSig = ""
+ scriptPubKey = bytearray(spend.tx.vout[spend.n].scriptPubKey)
+ if (scriptPubKey[0] == OP_TRUE): # looks like an anyone-can-spend
+ scriptSig = CScript([OP_TRUE])
+ else:
+ # We have to actually sign it
+ (sighash, err) = SignatureHash(spend.tx.vout[spend.n].scriptPubKey, tx, 0, SIGHASH_ALL)
+ scriptSig = CScript([self.coinbase_key.sign(sighash) + bytes(bytearray([SIGHASH_ALL]))])
+ tx.vin[0].scriptSig = scriptSig
+ # Now add the transaction to the block
+ block = self.add_transactions_to_block(block, [tx])
+ block.solve()
+ self.tip = block
+ self.block_heights[block.sha256] = height
+ self.block_time += 1
+ assert number not in self.blocks
+ self.blocks[number] = block
+ return block
+
+ def get_tests(self):
+ self.genesis_hash = int(self.nodes[0].getbestblockhash(), 16)
+ self.block_heights[self.genesis_hash] = 0
+ spendable_outputs = []
+
+ # save the current tip so it can be spent by a later block
+ def save_spendable_output():
+ spendable_outputs.append(self.tip)
+
+ # get an output that we previously marked as spendable
+ def get_spendable_output():
+ return PreviousSpendableOutput(spendable_outputs.pop(0).vtx[0], 0)
+
+ # returns a test case that asserts that the current tip was accepted
+ def accepted():
+ return TestInstance([[self.tip, True]])
+
+ # returns a test case that asserts that the current tip was rejected
+ def rejected():
+ return TestInstance([[self.tip, False]])
+
+ # move the tip back to a previous block
+ def tip(number):
+ self.tip = self.blocks[number]
+
+ # creates a new block and advances the tip to that block
+ block = self.next_block
+
+
+ # Create a new block
+ block(0)
+ save_spendable_output()
+ yield accepted()
+
+
+ # Now we need that block to mature so we can spend the coinbase.
+ test = TestInstance(sync_every_block=False)
+ for i in range(100):
+ block(1000 + i)
+ test.blocks_and_transactions.append([self.tip, True])
+ save_spendable_output()
+ yield test
+
+
+ # Start by building a couple of blocks on top (which output is spent is in parentheses):
+ # genesis -> b1 (0) -> b2 (1)
+ out0 = get_spendable_output()
+ block(1, spend=out0)
+ save_spendable_output()
+ yield accepted()
+
+ out1 = get_spendable_output()
+ block(2, spend=out1)
+ # Inv again, then deliver twice (shouldn't break anything).
+ yield accepted()
+
+
+ # so fork like this:
+ #
+ # genesis -> b1 (0) -> b2 (1)
+ # \-> b3 (1)
+ #
+ # Nothing should happen at this point. We saw b2 first so it takes priority.
+ tip(1)
+ block(3, spend=out1)
+ # Deliver twice (should still not break anything)
+ yield rejected()
+
+
+ # Now we add another block to make the alternative chain longer.
+ #
+ # genesis -> b1 (0) -> b2 (1)
+ # \-> b3 (1) -> b4 (2)
+ out2 = get_spendable_output()
+ block(4, spend=out2)
+ yield accepted()
+
+
+ # ... and back to the first chain.
+ # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+ # \-> b3 (1) -> b4 (2)
+ tip(2)
+ block(5, spend=out2)
+ save_spendable_output()
+ yield rejected()
+
+ out3 = get_spendable_output()
+ block(6, spend=out3)
+ yield accepted()
+
+
+ # Try to create a fork that double-spends
+ # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+ # \-> b7 (2) -> b8 (4)
+ # \-> b3 (1) -> b4 (2)
+ tip(5)
+ block(7, spend=out2)
+ yield rejected()
+
+ out4 = get_spendable_output()
+ block(8, spend=out4)
+ yield rejected()
+
+
+ # Try to create a block that has too much fee
+ # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+ # \-> b9 (4)
+ # \-> b3 (1) -> b4 (2)
+ tip(6)
+ block(9, spend=out4, additional_coinbase_value=1)
+ yield rejected()
+
+
+ # Create a fork that ends in a block with too much fee (the one that causes the reorg)
+ # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+ # \-> b10 (3) -> b11 (4)
+ # \-> b3 (1) -> b4 (2)
+ tip(5)
+ block(10, spend=out3)
+ yield rejected()
+
+ block(11, spend=out4, additional_coinbase_value=1)
+ yield rejected()
+
+
+ # Try again, but with a valid fork first
+ # genesis -> b1 (0) -> b2 (1) -> b5 (2) -> b6 (3)
+ # \-> b12 (3) -> b13 (4) -> b14 (5)
+ # (b12 added last)
+ # \-> b3 (1) -> b4 (2)
+ tip(5)
+ b12 = block(12, spend=out3)
+ save_spendable_output()
+ #yield TestInstance([[b12, False]])
+ b13 = block(13, spend=out4)
+ # Deliver the block header for b12, and the block b13.
+ # b13 should be accepted but the tip won't advance until b12 is delivered.
+ yield TestInstance([[CBlockHeader(b12), None], [b13, False]])
+
+ save_spendable_output()
+ out5 = get_spendable_output()
+ # b14 is invalid, but the node won't know that until it tries to connect
+ # Tip still can't advance because b12 is missing
+ block(14, spend=out5, additional_coinbase_value=1)
+ yield rejected()
+
+ yield TestInstance([[b12, True, b13.sha256]]) # New tip should be b13.
+
+
+ # Test that a block with a lot of checksigs is okay
+ lots_of_checksigs = CScript([OP_CHECKSIG] * (1000000 / 50 - 1))
+ tip(13)
+ block(15, spend=out5, script=lots_of_checksigs)
+ yield accepted()
+
+
+ # Test that a block with too many checksigs is rejected
+ out6 = get_spendable_output()
+ too_many_checksigs = CScript([OP_CHECKSIG] * (1000000 / 50))
+ block(16, spend=out6, script=too_many_checksigs)
+ yield rejected()
+
+
+
+if __name__ == '__main__':
+ FullBlockTest().main()
diff --git a/qa/rpc-tests/script_test.py b/qa/rpc-tests/script_test.py
index 860fa56b64..afc44b51b5 100755
--- a/qa/rpc-tests/script_test.py
+++ b/qa/rpc-tests/script_test.py
@@ -1,253 +1,259 @@
#!/usr/bin/env python2
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
'''
Test notes:
This test uses the script_valid and script_invalid tests from the unittest
framework to do end-to-end testing where we compare that two nodes agree on
whether blocks containing a given test script are valid.
We generally ignore the script flags associated with each test (since we lack
the precision to test each script using those flags in this framework), but
for tests with SCRIPT_VERIFY_P2SH, we can use a block time after the BIP16
switchover date to try to test with that flag enabled (and for tests without
that flag, we use a block time before the switchover date).
NOTE: This test is very slow and may take more than 40 minutes to run.
'''
from test_framework.test_framework import ComparisonTestFramework
from test_framework.util import *
from test_framework.comptool import TestInstance, TestManager
from test_framework.mininode import *
from test_framework.blocktools import *
from test_framework.script import *
import logging
import copy
import json
script_valid_file = "../../src/test/data/script_valid.json"
script_invalid_file = "../../src/test/data/script_invalid.json"
# Pass in a set of json files to open.
class ScriptTestFile(object):
def __init__(self, files):
self.files = files
self.index = -1
self.data = []
def load_files(self):
for f in self.files:
self.data.extend(json.loads(open(os.path.dirname(os.path.abspath(__file__))+"/"+f).read()))
# Skip over records that are not long enough to be tests
def get_records(self):
while (self.index < len(self.data)):
if len(self.data[self.index]) >= 3:
yield self.data[self.index]
self.index += 1
# Helper for parsing the flags specified in the .json files
SCRIPT_VERIFY_NONE = 0
SCRIPT_VERIFY_P2SH = 1
SCRIPT_VERIFY_STRICTENC = 1 << 1
SCRIPT_VERIFY_DERSIG = 1 << 2
SCRIPT_VERIFY_LOW_S = 1 << 3
SCRIPT_VERIFY_NULLDUMMY = 1 << 4
SCRIPT_VERIFY_SIGPUSHONLY = 1 << 5
SCRIPT_VERIFY_MINIMALDATA = 1 << 6
SCRIPT_VERIFY_DISCOURAGE_UPGRADABLE_NOPS = 1 << 7
SCRIPT_VERIFY_CLEANSTACK = 1 << 8
flag_map = {
"": SCRIPT_VERIFY_NONE,
"NONE": SCRIPT_VERIFY_NONE,
"P2SH": SCRIPT_VERIFY_P2SH,
"STRICTENC": SCRIPT_VERIFY_STRICTENC,
"DERSIG": SCRIPT_VERIFY_DERSIG,
"LOW_S": SCRIPT_VERIFY_LOW_S,
"NULLDUMMY": SCRIPT_VERIFY_NULLDUMMY,
"SIGPUSHONLY": SCRIPT_VERIFY_SIGPUSHONLY,
"MINIMALDATA": SCRIPT_VERIFY_MINIMALDATA,
"DISCOURAGE_UPGRADABLE_NOPS": SCRIPT_VERIFY_DISCOURAGE_UPGRADABLE_NOPS,
"CLEANSTACK": SCRIPT_VERIFY_CLEANSTACK,
}
def ParseScriptFlags(flag_string):
flags = 0
for x in flag_string.split(","):
if x in flag_map:
flags |= flag_map[x]
else:
print "Error: unrecognized script flag: ", x
return flags
'''
Given a string that is a scriptsig or scriptpubkey from the .json files above,
convert it to a CScript()
'''
# Replicates behavior from core_read.cpp
def ParseScript(json_script):
script = json_script.split(" ")
parsed_script = CScript()
for x in script:
if len(x) == 0:
# Empty string, ignore.
pass
elif x.isdigit() or (len(x) >= 1 and x[0] == "-" and x[1:].isdigit()):
# Number
n = int(x, 0)
if (n == -1) or (n >= 1 and n <= 16):
parsed_script = CScript(bytes(parsed_script) + bytes(CScript([n])))
else:
parsed_script += CScriptNum(int(x, 0))
elif x.startswith("0x"):
# Raw hex data, inserted NOT pushed onto stack:
for i in xrange(2, len(x), 2):
parsed_script = CScript(bytes(parsed_script) + bytes(chr(int(x[i:i+2],16))))
elif x.startswith("'") and x.endswith("'") and len(x) >= 2:
# Single-quoted string, pushed as data.
parsed_script += CScript([x[1:-1]])
else:
# opcode, e.g. OP_ADD or ADD:
tryopname = "OP_" + x
if tryopname in OPCODES_BY_NAME:
parsed_script += CScriptOp(OPCODES_BY_NAME["OP_" + x])
else:
print "ParseScript: error parsing '%s'" % x
return ""
return parsed_script
class TestBuilder(object):
- def create_credit_tx(self, scriptPubKey):
+ def create_credit_tx(self, scriptPubKey, height):
# self.tx1 is a coinbase transaction, modeled after the one created by script_tests.cpp
# This allows us to reuse signatures created in the unit test framework.
- self.tx1 = create_coinbase() # this has a bip34 scriptsig,
+ self.tx1 = create_coinbase(height) # this has a bip34 scriptsig,
self.tx1.vin[0].scriptSig = CScript([0, 0]) # but this matches the unit tests
self.tx1.vout[0].nValue = 0
self.tx1.vout[0].scriptPubKey = scriptPubKey
self.tx1.rehash()
def create_spend_tx(self, scriptSig):
self.tx2 = create_transaction(self.tx1, 0, CScript(), 0)
self.tx2.vin[0].scriptSig = scriptSig
self.tx2.vout[0].scriptPubKey = CScript()
self.tx2.rehash()
def rehash(self):
self.tx1.rehash()
self.tx2.rehash()
# This test uses the (default) two nodes provided by ComparisonTestFramework,
# specified on the command line with --testbinary and --refbinary.
# See comptool.py
class ScriptTest(ComparisonTestFramework):
def run_test(self):
# Set up the comparison tool TestManager
test = TestManager(self, self.options.tmpdir)
test.add_all_connections(self.nodes)
# Load scripts
self.scripts = ScriptTestFile([script_valid_file, script_invalid_file])
self.scripts.load_files()
# Some variables we re-use between test instances (to build blocks)
self.tip = None
self.block_time = None
NetworkThread().start() # Start up network handling in another thread
test.run()
def generate_test_instance(self, pubkeystring, scriptsigstring):
scriptpubkey = ParseScript(pubkeystring)
scriptsig = ParseScript(scriptsigstring)
test = TestInstance(sync_every_block=False)
test_build = TestBuilder()
- test_build.create_credit_tx(scriptpubkey)
+ test_build.create_credit_tx(scriptpubkey, self.height)
test_build.create_spend_tx(scriptsig)
test_build.rehash()
block = create_block(self.tip, test_build.tx1, self.block_time)
self.block_time += 1
block.solve()
self.tip = block.sha256
+ self.height += 1
test.blocks_and_transactions = [[block, True]]
for i in xrange(100):
- block = create_block(self.tip, create_coinbase(), self.block_time)
+ block = create_block(self.tip, create_coinbase(self.height), self.block_time)
self.block_time += 1
block.solve()
self.tip = block.sha256
+ self.height += 1
test.blocks_and_transactions.append([block, True])
- block = create_block(self.tip, create_coinbase(), self.block_time)
+ block = create_block(self.tip, create_coinbase(self.height), self.block_time)
self.block_time += 1
block.vtx.append(test_build.tx2)
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
block.solve()
test.blocks_and_transactions.append([block, None])
return test
# This generates the tests for TestManager.
def get_tests(self):
self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
self.block_time = 1333230000 # before the BIP16 switchover
+ self.height = 1
'''
Create a new block with an anyone-can-spend coinbase
'''
- block = create_block(self.tip, create_coinbase(), self.block_time)
+ block = create_block(self.tip, create_coinbase(self.height), self.block_time)
self.block_time += 1
block.solve()
self.tip = block.sha256
+ self.height += 1
yield TestInstance(objects=[[block, True]])
'''
Build out to 100 blocks total, maturing the coinbase.
'''
test = TestInstance(objects=[], sync_every_block=False, sync_every_tx=False)
for i in xrange(100):
- b = create_block(self.tip, create_coinbase(), self.block_time)
+ b = create_block(self.tip, create_coinbase(self.height), self.block_time)
b.solve()
test.blocks_and_transactions.append([b, True])
self.tip = b.sha256
self.block_time += 1
+ self.height += 1
yield test
''' Iterate through script tests. '''
counter = 0
for script_test in self.scripts.get_records():
''' Reset the blockchain to genesis block + 100 blocks. '''
if self.nodes[0].getblockcount() > 101:
self.nodes[0].invalidateblock(self.nodes[0].getblockhash(102))
self.nodes[1].invalidateblock(self.nodes[1].getblockhash(102))
self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
+ self.height = 102
[scriptsig, scriptpubkey, flags] = script_test[0:3]
flags = ParseScriptFlags(flags)
# We can use block time to determine whether the nodes should be
# enforcing BIP16.
#
# We intentionally let the block time grow by 1 each time.
# This forces the block hashes to differ between tests, so that
# a call to invalidateblock doesn't interfere with a later test.
if (flags & SCRIPT_VERIFY_P2SH):
self.block_time = 1333238400 + counter # Advance to enforcing BIP16
else:
self.block_time = 1333230000 + counter # Before the BIP16 switchover
print "Script test: [%s]" % script_test
yield self.generate_test_instance(scriptpubkey, scriptsig)
counter += 1
if __name__ == '__main__':
ScriptTest().main()
diff --git a/qa/rpc-tests/test_framework/blockstore.py b/qa/rpc-tests/test_framework/blockstore.py
index c57b6df81b..b9775b477c 100644
--- a/qa/rpc-tests/test_framework/blockstore.py
+++ b/qa/rpc-tests/test_framework/blockstore.py
@@ -1,127 +1,138 @@
# BlockStore: a helper class that keeps a map of blocks and implements
# helper functions for responding to getheaders and getdata,
# and for constructing a getheaders message
#
from mininode import *
import dbm
class BlockStore(object):
def __init__(self, datadir):
self.blockDB = dbm.open(datadir + "/blocks", 'c')
self.currentBlock = 0L
+ self.headers_map = dict()
def close(self):
self.blockDB.close()
def get(self, blockhash):
serialized_block = None
try:
serialized_block = self.blockDB[repr(blockhash)]
except KeyError:
return None
f = cStringIO.StringIO(serialized_block)
ret = CBlock()
ret.deserialize(f)
ret.calc_sha256()
return ret
+ def get_header(self, blockhash):
+ try:
+ return self.headers_map[blockhash]
+ except KeyError:
+ return None
+
# Note: headers are kept in an in-memory map (headers_map) so that
# responding to getheaders does not require pulling full blocks out
# of the database.
def headers_for(self, locator, hash_stop, current_tip=None):
if current_tip is None:
current_tip = self.currentBlock
- current_block = self.get(current_tip)
- if current_block is None:
+ current_block_header = self.get_header(current_tip)
+ if current_block_header is None:
return None
response = msg_headers()
- headersList = [ CBlockHeader(current_block) ]
+ headersList = [ current_block_header ]
maxheaders = 2000
while (headersList[0].sha256 not in locator.vHave):
prevBlockHash = headersList[0].hashPrevBlock
- prevBlock = self.get(prevBlockHash)
- if prevBlock is not None:
- headersList.insert(0, CBlockHeader(prevBlock))
+ prevBlockHeader = self.get_header(prevBlockHash)
+ if prevBlockHeader is not None:
+ headersList.insert(0, prevBlockHeader)
else:
break
headersList = headersList[:maxheaders] # truncate if we have too many
hashList = [x.sha256 for x in headersList]
index = len(headersList)
if (hash_stop in hashList):
index = hashList.index(hash_stop)+1
response.headers = headersList[:index]
return response
def add_block(self, block):
block.calc_sha256()
try:
self.blockDB[repr(block.sha256)] = bytes(block.serialize())
except TypeError as e:
print "Unexpected error: ", sys.exc_info()[0], e.args
self.currentBlock = block.sha256
+ self.headers_map[block.sha256] = CBlockHeader(block)
+
+ def add_header(self, header):
+ self.headers_map[header.sha256] = header
def get_blocks(self, inv):
responses = []
for i in inv:
if (i.type == 2): # MSG_BLOCK
block = self.get(i.hash)
if block is not None:
responses.append(msg_block(block))
return responses
def get_locator(self, current_tip=None):
if current_tip is None:
current_tip = self.currentBlock
r = []
counter = 0
step = 1
lastBlock = self.get(current_tip)
while lastBlock is not None:
r.append(lastBlock.hashPrevBlock)
for i in range(step):
lastBlock = self.get(lastBlock.hashPrevBlock)
if lastBlock is None:
break
counter += 1
if counter > 10:
step *= 2
locator = CBlockLocator()
locator.vHave = r
return locator
class TxStore(object):
def __init__(self, datadir):
self.txDB = dbm.open(datadir + "/transactions", 'c')
def close(self):
self.txDB.close()
def get(self, txhash):
serialized_tx = None
try:
serialized_tx = self.txDB[repr(txhash)]
except KeyError:
return None
f = cStringIO.StringIO(serialized_tx)
ret = CTransaction()
ret.deserialize(f)
ret.calc_sha256()
return ret
def add_transaction(self, tx):
tx.calc_sha256()
try:
self.txDB[repr(tx.sha256)] = bytes(tx.serialize())
except TypeError as e:
print "Unexpected error: ", sys.exc_info()[0], e.args
def get_transactions(self, inv):
responses = []
for i in inv:
if (i.type == 1): # MSG_TX
tx = self.get(i.hash)
if tx is not None:
responses.append(msg_tx(tx))
return responses
diff --git a/qa/rpc-tests/test_framework/blocktools.py b/qa/rpc-tests/test_framework/blocktools.py
index f397fe7cd6..59aa8c15cc 100644
--- a/qa/rpc-tests/test_framework/blocktools.py
+++ b/qa/rpc-tests/test_framework/blocktools.py
@@ -1,65 +1,67 @@
# blocktools.py - utilities for manipulating blocks and transactions
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
from mininode import *
-from script import CScript, CScriptOp
+from script import CScript, CScriptOp, OP_TRUE, OP_CHECKSIG
# Create a block (with regtest difficulty)
def create_block(hashprev, coinbase, nTime=None):
block = CBlock()
if nTime is None:
import time
block.nTime = int(time.time()+600)
else:
block.nTime = nTime
block.hashPrevBlock = hashprev
block.nBits = 0x207fffff # Will break after a difficulty adjustment...
block.vtx.append(coinbase)
block.hashMerkleRoot = block.calc_merkle_root()
block.calc_sha256()
return block
def serialize_script_num(value):
r = bytearray(0)
if value == 0:
return r
neg = value < 0
absvalue = -value if neg else value
while (absvalue):
r.append(chr(absvalue & 0xff))
absvalue >>= 8
if r[-1] & 0x80:
r.append(0x80 if neg else 0)
elif neg:
r[-1] |= 0x80
return r
-counter=1
-# Create an anyone-can-spend coinbase transaction, assuming no miner fees
-def create_coinbase(heightAdjust = 0):
- global counter
+# Create a coinbase transaction, assuming no miner fees.
+# If pubkey is passed in, the coinbase output will be a P2PK output;
+# otherwise an anyone-can-spend output.
+def create_coinbase(height, pubkey = None):
coinbase = CTransaction()
coinbase.vin.append(CTxIn(COutPoint(0, 0xffffffff),
- ser_string(serialize_script_num(counter+heightAdjust)), 0xffffffff))
- counter += 1
+ ser_string(serialize_script_num(height)), 0xffffffff))
coinbaseoutput = CTxOut()
coinbaseoutput.nValue = 50*100000000
- halvings = int((counter+heightAdjust)/150) # regtest
+ halvings = int(height/150) # regtest
coinbaseoutput.nValue >>= halvings
- coinbaseoutput.scriptPubKey = ""
+ if (pubkey != None):
+ coinbaseoutput.scriptPubKey = CScript([pubkey, OP_CHECKSIG])
+ else:
+ coinbaseoutput.scriptPubKey = CScript([OP_TRUE])
coinbase.vout = [ coinbaseoutput ]
coinbase.calc_sha256()
return coinbase
# Create a transaction with an anyone-can-spend output, that spends the
# nth output of prevtx.
def create_transaction(prevtx, n, sig, value):
tx = CTransaction()
assert(n < len(prevtx.vout))
tx.vin.append(CTxIn(COutPoint(prevtx.sha256, n), sig, 0xffffffff))
tx.vout.append(CTxOut(value, ""))
tx.calc_sha256()
return tx
diff --git a/qa/rpc-tests/test_framework/comptool.py b/qa/rpc-tests/test_framework/comptool.py
index b945f1bf29..e0b3ce040d 100755
--- a/qa/rpc-tests/test_framework/comptool.py
+++ b/qa/rpc-tests/test_framework/comptool.py
@@ -1,343 +1,373 @@
#!/usr/bin/env python2
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
from mininode import *
from blockstore import BlockStore, TxStore
from util import p2p_port
'''
This is a tool for comparing two or more bitcoinds to each other
using a script provided.
To use, create a class that implements get_tests(), and pass it in
as the test generator to TestManager. get_tests() should be a python
generator that returns TestInstance objects. See below for definition.
'''
# TestNode behaves as follows:
# Configure with a BlockStore and TxStore
# on_inv: log the message but don't request
# on_headers: log the chain tip
# on_pong: update ping response map (for synchronization)
# on_getheaders: provide headers via BlockStore
# on_getdata: provide blocks via BlockStore
global mininode_lock
def wait_until(predicate, attempts=float('inf'), timeout=float('inf')):
attempt = 0
elapsed = 0
while attempt < attempts and elapsed < timeout:
with mininode_lock:
if predicate():
return True
attempt += 1
elapsed += 0.05
time.sleep(0.05)
return False
class TestNode(NodeConnCB):
def __init__(self, block_store, tx_store):
NodeConnCB.__init__(self)
self.create_callback_map()
self.conn = None
self.bestblockhash = None
self.block_store = block_store
self.block_request_map = {}
self.tx_store = tx_store
self.tx_request_map = {}
# When the pingmap is non-empty we're waiting for
# a response
self.pingMap = {}
self.lastInv = []
self.closed = False
def on_close(self, conn):
self.closed = True
def add_connection(self, conn):
self.conn = conn
def on_headers(self, conn, message):
if len(message.headers) > 0:
best_header = message.headers[-1]
best_header.calc_sha256()
self.bestblockhash = best_header.sha256
def on_getheaders(self, conn, message):
response = self.block_store.headers_for(message.locator, message.hashstop)
if response is not None:
conn.send_message(response)
def on_getdata(self, conn, message):
[conn.send_message(r) for r in self.block_store.get_blocks(message.inv)]
[conn.send_message(r) for r in self.tx_store.get_transactions(message.inv)]
for i in message.inv:
if i.type == 1:
self.tx_request_map[i.hash] = True
elif i.type == 2:
self.block_request_map[i.hash] = True
def on_inv(self, conn, message):
self.lastInv = [x.hash for x in message.inv]
def on_pong(self, conn, message):
try:
del self.pingMap[message.nonce]
except KeyError:
raise AssertionError("Got pong for unknown ping [%s]" % repr(message))
def send_inv(self, obj):
mtype = 2 if isinstance(obj, CBlock) else 1
self.conn.send_message(msg_inv([CInv(mtype, obj.sha256)]))
def send_getheaders(self):
# We ask for headers from their last tip.
m = msg_getheaders()
m.locator = self.block_store.get_locator(self.bestblockhash)
self.conn.send_message(m)
# This assumes BIP31
def send_ping(self, nonce):
self.pingMap[nonce] = True
self.conn.send_message(msg_ping(nonce))
def received_ping_response(self, nonce):
return nonce not in self.pingMap
def send_mempool(self):
self.lastInv = []
self.conn.send_message(msg_mempool())
# TestInstance:
#
# Instances of these are generated by the test generator, and fed into the
# comptool.
#
-# "blocks_and_transactions" should be an array of [obj, True/False/None]:
-# - obj is either a CBlock or a CTransaction, and
+# "blocks_and_transactions" should be an array of
+# [obj, True/False/None, hash/None]:
+# - obj is either a CBlock, CBlockHeader, or a CTransaction, and
# - the second value indicates whether the object should be accepted
# into the blockchain or mempool (for tests where we expect a certain
# answer), or "None" if we don't expect a certain answer and are just
# comparing the behavior of the nodes being tested.
+# - the third value is the hash to test the tip against (if None or omitted,
+# use the hash of the block)
+# - NOTE: if a block header, no test is performed; instead the header is
+# just added to the block_store. This is to facilitate block delivery
+# when communicating with headers-first clients (when withholding an
+# intermediate block).
# sync_every_block: if True, then each block will be inv'ed, synced, and
# nodes will be tested based on the outcome for the block. If False,
# then inv's accumulate until all blocks are processed (or max inv size
# is reached) and then sent out in one inv message. Then the final block
# will be synced across all connections, and the outcome of the final
# block will be tested.
# sync_every_tx: analogous to behavior for sync_every_block, except if outcome
# on the final tx is None, then contents of entire mempool are compared
# across all connections. (If outcome of final tx is specified as true
# or false, then only the last tx is tested against outcome.)
class TestInstance(object):
def __init__(self, objects=None, sync_every_block=True, sync_every_tx=False):
self.blocks_and_transactions = objects if objects else []
self.sync_every_block = sync_every_block
self.sync_every_tx = sync_every_tx
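# A minimal sketch (not part of this patch) of the get_tests() generator a
# test class might implement, per the comment block above.  block1/block2
# are placeholders for CBlock objects the test built and hashed
# (calc_sha256()) beforehand.
def get_tests(self):
    # block1 should be accepted and become the tip
    yield TestInstance([[self.block1, True]])
    # block2 should be rejected; the tip should still be block1, so its
    # hash is supplied as the optional third element
    yield TestInstance([[self.block2, False, self.block1.sha256]])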
class TestManager(object):
def __init__(self, testgen, datadir):
self.test_generator = testgen
self.connections = []
self.test_nodes = []
self.block_store = BlockStore(datadir)
self.tx_store = TxStore(datadir)
self.ping_counter = 1
def add_all_connections(self, nodes):
for i in range(len(nodes)):
# Create a p2p connection to each node
test_node = TestNode(self.block_store, self.tx_store)
self.test_nodes.append(test_node)
self.connections.append(NodeConn('127.0.0.1', p2p_port(i), nodes[i], test_node))
# Make sure the TestNode (callback class) has a reference to its
# associated NodeConn
test_node.add_connection(self.connections[-1])
def wait_for_disconnections(self):
def disconnected():
return all(node.closed for node in self.test_nodes)
return wait_until(disconnected, timeout=10)
def wait_for_verack(self):
def veracked():
return all(node.verack_received for node in self.test_nodes)
return wait_until(veracked, timeout=10)
def wait_for_pings(self, counter):
def received_pongs():
return all(node.received_ping_response(counter) for node in self.test_nodes)
return wait_until(received_pongs)
# sync_blocks: Wait for all connections to request the blockhash given
# then send get_headers to find out the tip of each node, and synchronize
# the response by using a ping (and waiting for pong with same nonce).
def sync_blocks(self, blockhash, num_blocks):
def blocks_requested():
return all(
blockhash in node.block_request_map and node.block_request_map[blockhash]
for node in self.test_nodes
)
# --> error if not requested
if not wait_until(blocks_requested, attempts=20*num_blocks):
# print [ c.cb.block_request_map for c in self.connections ]
raise AssertionError("Not all nodes requested block")
- # --> Answer request (we did this inline!)
# Send getheaders message
[ c.cb.send_getheaders() for c in self.connections ]
# Send ping and wait for response -- synchronization hack
[ c.cb.send_ping(self.ping_counter) for c in self.connections ]
self.wait_for_pings(self.ping_counter)
self.ping_counter += 1
# Analogous to sync_blocks (see above)
def sync_transaction(self, txhash, num_events):
# Wait for nodes to request transaction (50ms sleep * 20 tries * num_events)
def transaction_requested():
return all(
txhash in node.tx_request_map and node.tx_request_map[txhash]
for node in self.test_nodes
)
# --> error if not requested
if not wait_until(transaction_requested, attempts=20*num_events):
# print [ c.cb.tx_request_map for c in self.connections ]
raise AssertionError("Not all nodes requested transaction")
- # --> Answer request (we did this inline!)
# Get the mempool
[ c.cb.send_mempool() for c in self.connections ]
# Send ping and wait for response -- synchronization hack
[ c.cb.send_ping(self.ping_counter) for c in self.connections ]
self.wait_for_pings(self.ping_counter)
self.ping_counter += 1
# Sort inv responses from each node
with mininode_lock:
[ c.cb.lastInv.sort() for c in self.connections ]
# Verify that the tips of all connections agree with each other, and
# with the expected outcome (if given)
def check_results(self, blockhash, outcome):
with mininode_lock:
for c in self.connections:
if outcome is None:
if c.cb.bestblockhash != self.connections[0].cb.bestblockhash:
return False
elif ((c.cb.bestblockhash == blockhash) != outcome):
# print c.cb.bestblockhash, blockhash, outcome
return False
return True
# Either check that the mempools all agree with each other, or that
# txhash's presence in the mempool matches the outcome specified.
# This is somewhat of a strange comparison, in that we're either comparing
# a particular tx to an outcome, or the entire mempools altogether;
# perhaps it would be useful to add the ability to check explicitly that
# a particular tx's existence in the mempool is the same across all nodes.
def check_mempool(self, txhash, outcome):
with mininode_lock:
for c in self.connections:
if outcome is None:
# Make sure the mempools agree with each other
if c.cb.lastInv != self.connections[0].cb.lastInv:
# print c.rpc.getrawmempool()
return False
elif ((txhash in c.cb.lastInv) != outcome):
# print c.rpc.getrawmempool(), c.cb.lastInv
return False
return True
def run(self):
# Wait until verack is received
self.wait_for_verack()
test_number = 1
for test_instance in self.test_generator.get_tests():
# We use these variables to keep track of the last block
# and last transaction in the tests, which are used
# if we're not syncing on every block or every tx.
- [ block, block_outcome ] = [ None, None ]
+ [ block, block_outcome, tip ] = [ None, None, None ]
[ tx, tx_outcome ] = [ None, None ]
invqueue = []
- for b_or_t, outcome in test_instance.blocks_and_transactions:
+ for test_obj in test_instance.blocks_and_transactions:
+ b_or_t = test_obj[0]
+ outcome = test_obj[1]
# Determine if we're dealing with a block or tx
if isinstance(b_or_t, CBlock): # Block test runner
block = b_or_t
block_outcome = outcome
+ tip = block.sha256
+ # each test_obj can have an optional third argument
+ # to specify the tip we should compare with
+ # (default is to use the block being tested)
+ if len(test_obj) >= 3:
+ tip = test_obj[2]
+
# Add to shared block_store, set as current block
+ # If there was an open getdata request for the block
+ # previously, and we didn't have an entry in the
+ # block_store, then immediately deliver, because the
+ # node wouldn't send another getdata request while
+ # the earlier one is outstanding.
+ first_block_with_hash = True
+ if self.block_store.get(block.sha256) is not None:
+ first_block_with_hash = False
with mininode_lock:
self.block_store.add_block(block)
for c in self.connections:
- c.cb.block_request_map[block.sha256] = False
+ if first_block_with_hash and block.sha256 in c.cb.block_request_map and c.cb.block_request_map[block.sha256] == True:
+ # There was a previous request for this block hash
+ # Most likely, we delivered a header for this block
+ # but never had the block to respond to the getdata
+ c.send_message(msg_block(block))
+ else:
+ c.cb.block_request_map[block.sha256] = False
# Either send inv's to each node and sync, or add
# to invqueue for later inv'ing.
if (test_instance.sync_every_block):
[ c.cb.send_inv(block) for c in self.connections ]
self.sync_blocks(block.sha256, 1)
- if (not self.check_results(block.sha256, outcome)):
+ if (not self.check_results(tip, outcome)):
raise AssertionError("Test failed at test %d" % test_number)
else:
invqueue.append(CInv(2, block.sha256))
+ elif isinstance(b_or_t, CBlockHeader):
+ block_header = b_or_t
+ self.block_store.add_header(block_header)
else: # Tx test runner
assert(isinstance(b_or_t, CTransaction))
tx = b_or_t
tx_outcome = outcome
# Add to shared tx store and clear map entry
with mininode_lock:
self.tx_store.add_transaction(tx)
for c in self.connections:
c.cb.tx_request_map[tx.sha256] = False
# Again, either inv to all nodes or save for later
if (test_instance.sync_every_tx):
[ c.cb.send_inv(tx) for c in self.connections ]
self.sync_transaction(tx.sha256, 1)
if (not self.check_mempool(tx.sha256, outcome)):
raise AssertionError("Test failed at test %d" % test_number)
else:
invqueue.append(CInv(1, tx.sha256))
# Ensure we're not overflowing the inv queue
if len(invqueue) == MAX_INV_SZ:
[ c.send_message(msg_inv(invqueue)) for c in self.connections ]
invqueue = []
# Do final sync if we weren't syncing on every block or every tx.
if (not test_instance.sync_every_block and block is not None):
if len(invqueue) > 0:
[ c.send_message(msg_inv(invqueue)) for c in self.connections ]
invqueue = []
- self.sync_blocks(block.sha256,
- len(test_instance.blocks_and_transactions))
- if (not self.check_results(block.sha256, block_outcome)):
+ self.sync_blocks(block.sha256, len(test_instance.blocks_and_transactions))
+ if (not self.check_results(tip, block_outcome)):
raise AssertionError("Block test failed at test %d" % test_number)
if (not test_instance.sync_every_tx and tx is not None):
if len(invqueue) > 0:
[ c.send_message(msg_inv(invqueue)) for c in self.connections ]
invqueue = []
self.sync_transaction(tx.sha256, len(test_instance.blocks_and_transactions))
if (not self.check_mempool(tx.sha256, tx_outcome)):
raise AssertionError("Mempool test failed at test %d" % test_number)
print "Test %d: PASS" % test_number, [ c.rpc.getblockcount() for c in self.connections ]
test_number += 1
[ c.disconnect_node() for c in self.connections ]
self.wait_for_disconnections()
self.block_store.close()
self.tx_store.close()
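A rough wiring sketch (not part of the patch) of how a test typically drives this class, assuming the surrounding test framework exposes self.nodes, self.options.tmpdir, and mininode's NetworkThread:
test = TestManager(self, self.options.tmpdir)   # 'self' implements get_tests()
test.add_all_connections(self.nodes)            # one p2p connection per bitcoind
NetworkThread().start()                         # start the mininode message loop
test.run()                                      # feed TestInstances and check outcomes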
diff --git a/qa/rpc-tests/test_framework/key.py b/qa/rpc-tests/test_framework/key.py
new file mode 100644
index 0000000000..ba3038fe04
--- /dev/null
+++ b/qa/rpc-tests/test_framework/key.py
@@ -0,0 +1,215 @@
+# Copyright (c) 2011 Sam Rushing
+#
+# key.py - OpenSSL wrapper
+#
+# This file is modified from python-bitcoinlib.
+#
+
+"""ECC secp256k1 crypto routines
+
+WARNING: This module does not mlock() secrets; your private keys may end up on
+disk in swap! Use with caution!
+"""
+
+import ctypes
+import ctypes.util
+import hashlib
+import sys
+
+ssl = ctypes.cdll.LoadLibrary(ctypes.util.find_library ('ssl') or 'libeay32')
+
+ssl.BN_new.restype = ctypes.c_void_p
+ssl.BN_new.argtypes = []
+
+ssl.BN_bin2bn.restype = ctypes.c_void_p
+ssl.BN_bin2bn.argtypes = [ctypes.c_char_p, ctypes.c_int, ctypes.c_void_p]
+
+ssl.BN_CTX_free.restype = None
+ssl.BN_CTX_free.argtypes = [ctypes.c_void_p]
+
+ssl.BN_CTX_new.restype = ctypes.c_void_p
+ssl.BN_CTX_new.argtypes = []
+
+ssl.ECDH_compute_key.restype = ctypes.c_int
+ssl.ECDH_compute_key.argtypes = [ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p]
+
+ssl.ECDSA_sign.restype = ctypes.c_int
+ssl.ECDSA_sign.argtypes = [ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
+
+ssl.ECDSA_verify.restype = ctypes.c_int
+ssl.ECDSA_verify.argtypes = [ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p, ctypes.c_int, ctypes.c_void_p]
+
+ssl.EC_KEY_free.restype = None
+ssl.EC_KEY_free.argtypes = [ctypes.c_void_p]
+
+ssl.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p
+ssl.EC_KEY_new_by_curve_name.argtypes = [ctypes.c_int]
+
+ssl.EC_KEY_get0_group.restype = ctypes.c_void_p
+ssl.EC_KEY_get0_group.argtypes = [ctypes.c_void_p]
+
+ssl.EC_KEY_get0_public_key.restype = ctypes.c_void_p
+ssl.EC_KEY_get0_public_key.argtypes = [ctypes.c_void_p]
+
+ssl.EC_KEY_set_private_key.restype = ctypes.c_int
+ssl.EC_KEY_set_private_key.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
+
+ssl.EC_KEY_set_conv_form.restype = None
+ssl.EC_KEY_set_conv_form.argtypes = [ctypes.c_void_p, ctypes.c_int]
+
+ssl.EC_KEY_set_public_key.restype = ctypes.c_int
+ssl.EC_KEY_set_public_key.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
+
+ssl.i2o_ECPublicKey.restype = ctypes.c_void_p
+ssl.i2o_ECPublicKey.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
+
+ssl.EC_POINT_new.restype = ctypes.c_void_p
+ssl.EC_POINT_new.argtypes = [ctypes.c_void_p]
+
+ssl.EC_POINT_free.restype = None
+ssl.EC_POINT_free.argtypes = [ctypes.c_void_p]
+
+ssl.EC_POINT_mul.restype = ctypes.c_int
+ssl.EC_POINT_mul.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
+
+# this specifies the curve used with ECDSA.
+NID_secp256k1 = 714 # from openssl/obj_mac.h
+
+# Thx to Sam Devlin for the ctypes magic 64-bit fix.
+def _check_result(val, func, args):
+ if val == 0:
+ raise ValueError
+ else:
+ return ctypes.c_void_p (val)
+
+ssl.EC_KEY_new_by_curve_name.restype = ctypes.c_void_p
+ssl.EC_KEY_new_by_curve_name.errcheck = _check_result
+
+class CECKey(object):
+ """Wrapper around OpenSSL's EC_KEY"""
+
+ POINT_CONVERSION_COMPRESSED = 2
+ POINT_CONVERSION_UNCOMPRESSED = 4
+
+ def __init__(self):
+ self.k = ssl.EC_KEY_new_by_curve_name(NID_secp256k1)
+
+ def __del__(self):
+ if ssl:
+ ssl.EC_KEY_free(self.k)
+ self.k = None
+
+ def set_secretbytes(self, secret):
+ priv_key = ssl.BN_bin2bn(secret, 32, ssl.BN_new())
+ group = ssl.EC_KEY_get0_group(self.k)
+ pub_key = ssl.EC_POINT_new(group)
+ ctx = ssl.BN_CTX_new()
+ if not ssl.EC_POINT_mul(group, pub_key, priv_key, None, None, ctx):
+ raise ValueError("Could not derive public key from the supplied secret.")
+ ssl.EC_POINT_mul(group, pub_key, priv_key, None, None, ctx)
+ ssl.EC_KEY_set_private_key(self.k, priv_key)
+ ssl.EC_KEY_set_public_key(self.k, pub_key)
+ ssl.EC_POINT_free(pub_key)
+ ssl.BN_CTX_free(ctx)
+ return self.k
+
+ def set_privkey(self, key):
+ self.mb = ctypes.create_string_buffer(key)
+ return ssl.d2i_ECPrivateKey(ctypes.byref(self.k), ctypes.byref(ctypes.pointer(self.mb)), len(key))
+
+ def set_pubkey(self, key):
+ self.mb = ctypes.create_string_buffer(key)
+ return ssl.o2i_ECPublicKey(ctypes.byref(self.k), ctypes.byref(ctypes.pointer(self.mb)), len(key))
+
+ def get_privkey(self):
+ size = ssl.i2d_ECPrivateKey(self.k, 0)
+ mb_pri = ctypes.create_string_buffer(size)
+ ssl.i2d_ECPrivateKey(self.k, ctypes.byref(ctypes.pointer(mb_pri)))
+ return mb_pri.raw
+
+ def get_pubkey(self):
+ size = ssl.i2o_ECPublicKey(self.k, 0)
+ mb = ctypes.create_string_buffer(size)
+ ssl.i2o_ECPublicKey(self.k, ctypes.byref(ctypes.pointer(mb)))
+ return mb.raw
+
+ def get_raw_ecdh_key(self, other_pubkey):
+ ecdh_keybuffer = ctypes.create_string_buffer(32)
+ r = ssl.ECDH_compute_key(ctypes.pointer(ecdh_keybuffer), 32,
+ ssl.EC_KEY_get0_public_key(other_pubkey.k),
+ self.k, 0)
+ if r != 32:
+ raise Exception('CKey.get_ecdh_key(): ECDH_compute_key() failed')
+ return ecdh_keybuffer.raw
+
+ def get_ecdh_key(self, other_pubkey, kdf=lambda k: hashlib.sha256(k).digest()):
+ # FIXME: be warned it's not clear what the kdf should be as a default
+ r = self.get_raw_ecdh_key(other_pubkey)
+ return kdf(r)
+
+ def sign(self, hash):
+ # FIXME: need unit tests for below cases
+ if not isinstance(hash, bytes):
+ raise TypeError('Hash must be bytes instance; got %r' % hash.__class__)
+ if len(hash) != 32:
+ raise ValueError('Hash must be exactly 32 bytes long')
+
+ sig_size0 = ctypes.c_uint32()
+ sig_size0.value = ssl.ECDSA_size(self.k)
+ mb_sig = ctypes.create_string_buffer(sig_size0.value)
+ result = ssl.ECDSA_sign(0, hash, len(hash), mb_sig, ctypes.byref(sig_size0), self.k)
+ assert 1 == result
+ return mb_sig.raw[:sig_size0.value]
+
+ def verify(self, hash, sig):
+ """Verify a DER signature"""
+ return ssl.ECDSA_verify(0, hash, len(hash), sig, len(sig), self.k) == 1
+
+ def set_compressed(self, compressed):
+ if compressed:
+ form = self.POINT_CONVERSION_COMPRESSED
+ else:
+ form = self.POINT_CONVERSION_UNCOMPRESSED
+ ssl.EC_KEY_set_conv_form(self.k, form)
+
+
+class CPubKey(bytes):
+ """An encapsulated public key
+
+ Attributes:
+
+ is_valid - Corresponds to CPubKey.IsValid()
+ is_fullyvalid - Corresponds to CPubKey.IsFullyValid()
+ is_compressed - Corresponds to CPubKey.IsCompressed()
+ """
+
+ def __new__(cls, buf, _cec_key=None):
+ self = super(CPubKey, cls).__new__(cls, buf)
+ if _cec_key is None:
+ _cec_key = CECKey()
+ self._cec_key = _cec_key
+ self.is_fullyvalid = _cec_key.set_pubkey(self) != 0
+ return self
+
+ @property
+ def is_valid(self):
+ return len(self) > 0
+
+ @property
+ def is_compressed(self):
+ return len(self) == 33
+
+ def verify(self, hash, sig):
+ return self._cec_key.verify(hash, sig)
+
+ def __str__(self):
+ return repr(self)
+
+ def __repr__(self):
+ # Always represent as b'<secret>' so test cases don't have to
+ # change for py2/3
+ if sys.version > '3':
+ return '%s(%s)' % (self.__class__.__name__, super(CPubKey, self).__repr__())
+ else:
+ return '%s(b%s)' % (self.__class__.__name__, super(CPubKey, self).__repr__())
+
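A small usage sketch (not part of the patch): derive a key from 32 fixed secret bytes, sign a 32-byte message hash, then verify the signature both via the CECKey and via a CPubKey built from its serialized public key.
import hashlib

key = CECKey()
key.set_secretbytes(b"\x01" * 32)                 # fixed secret, illustration only
key.set_compressed(True)
msg_hash = hashlib.sha256(b"example").digest()    # sign() requires exactly 32 bytes
sig = key.sign(msg_hash)
assert key.verify(msg_hash, sig)
pub = CPubKey(key.get_pubkey())
assert pub.is_fullyvalid
assert pub.verify(msg_hash, sig)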
diff --git a/qa/rpc-tests/test_framework/script.py b/qa/rpc-tests/test_framework/script.py
index e37ab5d45a..0a78cf6fb1 100644
--- a/qa/rpc-tests/test_framework/script.py
+++ b/qa/rpc-tests/test_framework/script.py
@@ -1,896 +1,896 @@
#
# script.py
#
# This file is modified from python-bitcoinlib.
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
"""Scripts
Functionality to build scripts, as well as SignatureHash().
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from test_framework.mininode import CTransaction, CTxOut, hash256
import sys
bchr = chr
bord = ord
if sys.version > '3':
long = int
bchr = lambda x: bytes([x])
bord = lambda x: x
import copy
import struct
import binascii
-import test_framework.bignum
+from test_framework.bignum import bn2vch
MAX_SCRIPT_SIZE = 10000
MAX_SCRIPT_ELEMENT_SIZE = 520
MAX_SCRIPT_OPCODES = 201
OPCODE_NAMES = {}
_opcode_instances = []
class CScriptOp(int):
"""A single script opcode"""
__slots__ = []
@staticmethod
def encode_op_pushdata(d):
"""Encode a PUSHDATA op, returning bytes"""
if len(d) < 0x4c:
return b'' + bchr(len(d)) + d # OP_PUSHDATA
elif len(d) <= 0xff:
return b'\x4c' + bchr(len(d)) + d # OP_PUSHDATA1
elif len(d) <= 0xffff:
return b'\x4d' + struct.pack(b'<H', len(d)) + d # OP_PUSHDATA2
elif len(d) <= 0xffffffff:
return b'\x4e' + struct.pack(b'<I', len(d)) + d # OP_PUSHDATA4
else:
raise ValueError("Data too long to encode in a PUSHDATA op")
@staticmethod
def encode_op_n(n):
"""Encode a small integer op, returning an opcode"""
if not (0 <= n <= 16):
raise ValueError('Integer must be in range 0 <= n <= 16, got %d' % n)
if n == 0:
return OP_0
else:
return CScriptOp(OP_1 + n-1)
def decode_op_n(self):
"""Decode a small integer opcode, returning an integer"""
if self == OP_0:
return 0
if not (self == OP_0 or OP_1 <= self <= OP_16):
raise ValueError('op %r is not an OP_N' % self)
return int(self - OP_1+1)
def is_small_int(self):
"""Return true if the op pushes a small integer to the stack"""
if 0x51 <= self <= 0x60 or self == 0:
return True
else:
return False
def __str__(self):
return repr(self)
def __repr__(self):
if self in OPCODE_NAMES:
return OPCODE_NAMES[self]
else:
return 'CScriptOp(0x%x)' % self
def __new__(cls, n):
try:
return _opcode_instances[n]
except IndexError:
assert len(_opcode_instances) == n
_opcode_instances.append(super(CScriptOp, cls).__new__(cls, n))
return _opcode_instances[n]
# Populate opcode instance table
for n in range(0xff+1):
CScriptOp(n)
# push value
OP_0 = CScriptOp(0x00)
OP_FALSE = OP_0
OP_PUSHDATA1 = CScriptOp(0x4c)
OP_PUSHDATA2 = CScriptOp(0x4d)
OP_PUSHDATA4 = CScriptOp(0x4e)
OP_1NEGATE = CScriptOp(0x4f)
OP_RESERVED = CScriptOp(0x50)
OP_1 = CScriptOp(0x51)
OP_TRUE=OP_1
OP_2 = CScriptOp(0x52)
OP_3 = CScriptOp(0x53)
OP_4 = CScriptOp(0x54)
OP_5 = CScriptOp(0x55)
OP_6 = CScriptOp(0x56)
OP_7 = CScriptOp(0x57)
OP_8 = CScriptOp(0x58)
OP_9 = CScriptOp(0x59)
OP_10 = CScriptOp(0x5a)
OP_11 = CScriptOp(0x5b)
OP_12 = CScriptOp(0x5c)
OP_13 = CScriptOp(0x5d)
OP_14 = CScriptOp(0x5e)
OP_15 = CScriptOp(0x5f)
OP_16 = CScriptOp(0x60)
# control
OP_NOP = CScriptOp(0x61)
OP_VER = CScriptOp(0x62)
OP_IF = CScriptOp(0x63)
OP_NOTIF = CScriptOp(0x64)
OP_VERIF = CScriptOp(0x65)
OP_VERNOTIF = CScriptOp(0x66)
OP_ELSE = CScriptOp(0x67)
OP_ENDIF = CScriptOp(0x68)
OP_VERIFY = CScriptOp(0x69)
OP_RETURN = CScriptOp(0x6a)
# stack ops
OP_TOALTSTACK = CScriptOp(0x6b)
OP_FROMALTSTACK = CScriptOp(0x6c)
OP_2DROP = CScriptOp(0x6d)
OP_2DUP = CScriptOp(0x6e)
OP_3DUP = CScriptOp(0x6f)
OP_2OVER = CScriptOp(0x70)
OP_2ROT = CScriptOp(0x71)
OP_2SWAP = CScriptOp(0x72)
OP_IFDUP = CScriptOp(0x73)
OP_DEPTH = CScriptOp(0x74)
OP_DROP = CScriptOp(0x75)
OP_DUP = CScriptOp(0x76)
OP_NIP = CScriptOp(0x77)
OP_OVER = CScriptOp(0x78)
OP_PICK = CScriptOp(0x79)
OP_ROLL = CScriptOp(0x7a)
OP_ROT = CScriptOp(0x7b)
OP_SWAP = CScriptOp(0x7c)
OP_TUCK = CScriptOp(0x7d)
# splice ops
OP_CAT = CScriptOp(0x7e)
OP_SUBSTR = CScriptOp(0x7f)
OP_LEFT = CScriptOp(0x80)
OP_RIGHT = CScriptOp(0x81)
OP_SIZE = CScriptOp(0x82)
# bit logic
OP_INVERT = CScriptOp(0x83)
OP_AND = CScriptOp(0x84)
OP_OR = CScriptOp(0x85)
OP_XOR = CScriptOp(0x86)
OP_EQUAL = CScriptOp(0x87)
OP_EQUALVERIFY = CScriptOp(0x88)
OP_RESERVED1 = CScriptOp(0x89)
OP_RESERVED2 = CScriptOp(0x8a)
# numeric
OP_1ADD = CScriptOp(0x8b)
OP_1SUB = CScriptOp(0x8c)
OP_2MUL = CScriptOp(0x8d)
OP_2DIV = CScriptOp(0x8e)
OP_NEGATE = CScriptOp(0x8f)
OP_ABS = CScriptOp(0x90)
OP_NOT = CScriptOp(0x91)
OP_0NOTEQUAL = CScriptOp(0x92)
OP_ADD = CScriptOp(0x93)
OP_SUB = CScriptOp(0x94)
OP_MUL = CScriptOp(0x95)
OP_DIV = CScriptOp(0x96)
OP_MOD = CScriptOp(0x97)
OP_LSHIFT = CScriptOp(0x98)
OP_RSHIFT = CScriptOp(0x99)
OP_BOOLAND = CScriptOp(0x9a)
OP_BOOLOR = CScriptOp(0x9b)
OP_NUMEQUAL = CScriptOp(0x9c)
OP_NUMEQUALVERIFY = CScriptOp(0x9d)
OP_NUMNOTEQUAL = CScriptOp(0x9e)
OP_LESSTHAN = CScriptOp(0x9f)
OP_GREATERTHAN = CScriptOp(0xa0)
OP_LESSTHANOREQUAL = CScriptOp(0xa1)
OP_GREATERTHANOREQUAL = CScriptOp(0xa2)
OP_MIN = CScriptOp(0xa3)
OP_MAX = CScriptOp(0xa4)
OP_WITHIN = CScriptOp(0xa5)
# crypto
OP_RIPEMD160 = CScriptOp(0xa6)
OP_SHA1 = CScriptOp(0xa7)
OP_SHA256 = CScriptOp(0xa8)
OP_HASH160 = CScriptOp(0xa9)
OP_HASH256 = CScriptOp(0xaa)
OP_CODESEPARATOR = CScriptOp(0xab)
OP_CHECKSIG = CScriptOp(0xac)
OP_CHECKSIGVERIFY = CScriptOp(0xad)
OP_CHECKMULTISIG = CScriptOp(0xae)
OP_CHECKMULTISIGVERIFY = CScriptOp(0xaf)
# expansion
OP_NOP1 = CScriptOp(0xb0)
OP_NOP2 = CScriptOp(0xb1)
OP_NOP3 = CScriptOp(0xb2)
OP_NOP4 = CScriptOp(0xb3)
OP_NOP5 = CScriptOp(0xb4)
OP_NOP6 = CScriptOp(0xb5)
OP_NOP7 = CScriptOp(0xb6)
OP_NOP8 = CScriptOp(0xb7)
OP_NOP9 = CScriptOp(0xb8)
OP_NOP10 = CScriptOp(0xb9)
# template matching params
OP_SMALLINTEGER = CScriptOp(0xfa)
OP_PUBKEYS = CScriptOp(0xfb)
OP_PUBKEYHASH = CScriptOp(0xfd)
OP_PUBKEY = CScriptOp(0xfe)
OP_INVALIDOPCODE = CScriptOp(0xff)
VALID_OPCODES = {
OP_1NEGATE,
OP_RESERVED,
OP_1,
OP_2,
OP_3,
OP_4,
OP_5,
OP_6,
OP_7,
OP_8,
OP_9,
OP_10,
OP_11,
OP_12,
OP_13,
OP_14,
OP_15,
OP_16,
OP_NOP,
OP_VER,
OP_IF,
OP_NOTIF,
OP_VERIF,
OP_VERNOTIF,
OP_ELSE,
OP_ENDIF,
OP_VERIFY,
OP_RETURN,
OP_TOALTSTACK,
OP_FROMALTSTACK,
OP_2DROP,
OP_2DUP,
OP_3DUP,
OP_2OVER,
OP_2ROT,
OP_2SWAP,
OP_IFDUP,
OP_DEPTH,
OP_DROP,
OP_DUP,
OP_NIP,
OP_OVER,
OP_PICK,
OP_ROLL,
OP_ROT,
OP_SWAP,
OP_TUCK,
OP_CAT,
OP_SUBSTR,
OP_LEFT,
OP_RIGHT,
OP_SIZE,
OP_INVERT,
OP_AND,
OP_OR,
OP_XOR,
OP_EQUAL,
OP_EQUALVERIFY,
OP_RESERVED1,
OP_RESERVED2,
OP_1ADD,
OP_1SUB,
OP_2MUL,
OP_2DIV,
OP_NEGATE,
OP_ABS,
OP_NOT,
OP_0NOTEQUAL,
OP_ADD,
OP_SUB,
OP_MUL,
OP_DIV,
OP_MOD,
OP_LSHIFT,
OP_RSHIFT,
OP_BOOLAND,
OP_BOOLOR,
OP_NUMEQUAL,
OP_NUMEQUALVERIFY,
OP_NUMNOTEQUAL,
OP_LESSTHAN,
OP_GREATERTHAN,
OP_LESSTHANOREQUAL,
OP_GREATERTHANOREQUAL,
OP_MIN,
OP_MAX,
OP_WITHIN,
OP_RIPEMD160,
OP_SHA1,
OP_SHA256,
OP_HASH160,
OP_HASH256,
OP_CODESEPARATOR,
OP_CHECKSIG,
OP_CHECKSIGVERIFY,
OP_CHECKMULTISIG,
OP_CHECKMULTISIGVERIFY,
OP_NOP1,
OP_NOP2,
OP_NOP3,
OP_NOP4,
OP_NOP5,
OP_NOP6,
OP_NOP7,
OP_NOP8,
OP_NOP9,
OP_NOP10,
OP_SMALLINTEGER,
OP_PUBKEYS,
OP_PUBKEYHASH,
OP_PUBKEY,
}
OPCODE_NAMES.update({
OP_0 : 'OP_0',
OP_PUSHDATA1 : 'OP_PUSHDATA1',
OP_PUSHDATA2 : 'OP_PUSHDATA2',
OP_PUSHDATA4 : 'OP_PUSHDATA4',
OP_1NEGATE : 'OP_1NEGATE',
OP_RESERVED : 'OP_RESERVED',
OP_1 : 'OP_1',
OP_2 : 'OP_2',
OP_3 : 'OP_3',
OP_4 : 'OP_4',
OP_5 : 'OP_5',
OP_6 : 'OP_6',
OP_7 : 'OP_7',
OP_8 : 'OP_8',
OP_9 : 'OP_9',
OP_10 : 'OP_10',
OP_11 : 'OP_11',
OP_12 : 'OP_12',
OP_13 : 'OP_13',
OP_14 : 'OP_14',
OP_15 : 'OP_15',
OP_16 : 'OP_16',
OP_NOP : 'OP_NOP',
OP_VER : 'OP_VER',
OP_IF : 'OP_IF',
OP_NOTIF : 'OP_NOTIF',
OP_VERIF : 'OP_VERIF',
OP_VERNOTIF : 'OP_VERNOTIF',
OP_ELSE : 'OP_ELSE',
OP_ENDIF : 'OP_ENDIF',
OP_VERIFY : 'OP_VERIFY',
OP_RETURN : 'OP_RETURN',
OP_TOALTSTACK : 'OP_TOALTSTACK',
OP_FROMALTSTACK : 'OP_FROMALTSTACK',
OP_2DROP : 'OP_2DROP',
OP_2DUP : 'OP_2DUP',
OP_3DUP : 'OP_3DUP',
OP_2OVER : 'OP_2OVER',
OP_2ROT : 'OP_2ROT',
OP_2SWAP : 'OP_2SWAP',
OP_IFDUP : 'OP_IFDUP',
OP_DEPTH : 'OP_DEPTH',
OP_DROP : 'OP_DROP',
OP_DUP : 'OP_DUP',
OP_NIP : 'OP_NIP',
OP_OVER : 'OP_OVER',
OP_PICK : 'OP_PICK',
OP_ROLL : 'OP_ROLL',
OP_ROT : 'OP_ROT',
OP_SWAP : 'OP_SWAP',
OP_TUCK : 'OP_TUCK',
OP_CAT : 'OP_CAT',
OP_SUBSTR : 'OP_SUBSTR',
OP_LEFT : 'OP_LEFT',
OP_RIGHT : 'OP_RIGHT',
OP_SIZE : 'OP_SIZE',
OP_INVERT : 'OP_INVERT',
OP_AND : 'OP_AND',
OP_OR : 'OP_OR',
OP_XOR : 'OP_XOR',
OP_EQUAL : 'OP_EQUAL',
OP_EQUALVERIFY : 'OP_EQUALVERIFY',
OP_RESERVED1 : 'OP_RESERVED1',
OP_RESERVED2 : 'OP_RESERVED2',
OP_1ADD : 'OP_1ADD',
OP_1SUB : 'OP_1SUB',
OP_2MUL : 'OP_2MUL',
OP_2DIV : 'OP_2DIV',
OP_NEGATE : 'OP_NEGATE',
OP_ABS : 'OP_ABS',
OP_NOT : 'OP_NOT',
OP_0NOTEQUAL : 'OP_0NOTEQUAL',
OP_ADD : 'OP_ADD',
OP_SUB : 'OP_SUB',
OP_MUL : 'OP_MUL',
OP_DIV : 'OP_DIV',
OP_MOD : 'OP_MOD',
OP_LSHIFT : 'OP_LSHIFT',
OP_RSHIFT : 'OP_RSHIFT',
OP_BOOLAND : 'OP_BOOLAND',
OP_BOOLOR : 'OP_BOOLOR',
OP_NUMEQUAL : 'OP_NUMEQUAL',
OP_NUMEQUALVERIFY : 'OP_NUMEQUALVERIFY',
OP_NUMNOTEQUAL : 'OP_NUMNOTEQUAL',
OP_LESSTHAN : 'OP_LESSTHAN',
OP_GREATERTHAN : 'OP_GREATERTHAN',
OP_LESSTHANOREQUAL : 'OP_LESSTHANOREQUAL',
OP_GREATERTHANOREQUAL : 'OP_GREATERTHANOREQUAL',
OP_MIN : 'OP_MIN',
OP_MAX : 'OP_MAX',
OP_WITHIN : 'OP_WITHIN',
OP_RIPEMD160 : 'OP_RIPEMD160',
OP_SHA1 : 'OP_SHA1',
OP_SHA256 : 'OP_SHA256',
OP_HASH160 : 'OP_HASH160',
OP_HASH256 : 'OP_HASH256',
OP_CODESEPARATOR : 'OP_CODESEPARATOR',
OP_CHECKSIG : 'OP_CHECKSIG',
OP_CHECKSIGVERIFY : 'OP_CHECKSIGVERIFY',
OP_CHECKMULTISIG : 'OP_CHECKMULTISIG',
OP_CHECKMULTISIGVERIFY : 'OP_CHECKMULTISIGVERIFY',
OP_NOP1 : 'OP_NOP1',
OP_NOP2 : 'OP_NOP2',
OP_NOP3 : 'OP_NOP3',
OP_NOP4 : 'OP_NOP4',
OP_NOP5 : 'OP_NOP5',
OP_NOP6 : 'OP_NOP6',
OP_NOP7 : 'OP_NOP7',
OP_NOP8 : 'OP_NOP8',
OP_NOP9 : 'OP_NOP9',
OP_NOP10 : 'OP_NOP10',
OP_SMALLINTEGER : 'OP_SMALLINTEGER',
OP_PUBKEYS : 'OP_PUBKEYS',
OP_PUBKEYHASH : 'OP_PUBKEYHASH',
OP_PUBKEY : 'OP_PUBKEY',
OP_INVALIDOPCODE : 'OP_INVALIDOPCODE',
})
OPCODES_BY_NAME = {
'OP_0' : OP_0,
'OP_PUSHDATA1' : OP_PUSHDATA1,
'OP_PUSHDATA2' : OP_PUSHDATA2,
'OP_PUSHDATA4' : OP_PUSHDATA4,
'OP_1NEGATE' : OP_1NEGATE,
'OP_RESERVED' : OP_RESERVED,
'OP_1' : OP_1,
'OP_2' : OP_2,
'OP_3' : OP_3,
'OP_4' : OP_4,
'OP_5' : OP_5,
'OP_6' : OP_6,
'OP_7' : OP_7,
'OP_8' : OP_8,
'OP_9' : OP_9,
'OP_10' : OP_10,
'OP_11' : OP_11,
'OP_12' : OP_12,
'OP_13' : OP_13,
'OP_14' : OP_14,
'OP_15' : OP_15,
'OP_16' : OP_16,
'OP_NOP' : OP_NOP,
'OP_VER' : OP_VER,
'OP_IF' : OP_IF,
'OP_NOTIF' : OP_NOTIF,
'OP_VERIF' : OP_VERIF,
'OP_VERNOTIF' : OP_VERNOTIF,
'OP_ELSE' : OP_ELSE,
'OP_ENDIF' : OP_ENDIF,
'OP_VERIFY' : OP_VERIFY,
'OP_RETURN' : OP_RETURN,
'OP_TOALTSTACK' : OP_TOALTSTACK,
'OP_FROMALTSTACK' : OP_FROMALTSTACK,
'OP_2DROP' : OP_2DROP,
'OP_2DUP' : OP_2DUP,
'OP_3DUP' : OP_3DUP,
'OP_2OVER' : OP_2OVER,
'OP_2ROT' : OP_2ROT,
'OP_2SWAP' : OP_2SWAP,
'OP_IFDUP' : OP_IFDUP,
'OP_DEPTH' : OP_DEPTH,
'OP_DROP' : OP_DROP,
'OP_DUP' : OP_DUP,
'OP_NIP' : OP_NIP,
'OP_OVER' : OP_OVER,
'OP_PICK' : OP_PICK,
'OP_ROLL' : OP_ROLL,
'OP_ROT' : OP_ROT,
'OP_SWAP' : OP_SWAP,
'OP_TUCK' : OP_TUCK,
'OP_CAT' : OP_CAT,
'OP_SUBSTR' : OP_SUBSTR,
'OP_LEFT' : OP_LEFT,
'OP_RIGHT' : OP_RIGHT,
'OP_SIZE' : OP_SIZE,
'OP_INVERT' : OP_INVERT,
'OP_AND' : OP_AND,
'OP_OR' : OP_OR,
'OP_XOR' : OP_XOR,
'OP_EQUAL' : OP_EQUAL,
'OP_EQUALVERIFY' : OP_EQUALVERIFY,
'OP_RESERVED1' : OP_RESERVED1,
'OP_RESERVED2' : OP_RESERVED2,
'OP_1ADD' : OP_1ADD,
'OP_1SUB' : OP_1SUB,
'OP_2MUL' : OP_2MUL,
'OP_2DIV' : OP_2DIV,
'OP_NEGATE' : OP_NEGATE,
'OP_ABS' : OP_ABS,
'OP_NOT' : OP_NOT,
'OP_0NOTEQUAL' : OP_0NOTEQUAL,
'OP_ADD' : OP_ADD,
'OP_SUB' : OP_SUB,
'OP_MUL' : OP_MUL,
'OP_DIV' : OP_DIV,
'OP_MOD' : OP_MOD,
'OP_LSHIFT' : OP_LSHIFT,
'OP_RSHIFT' : OP_RSHIFT,
'OP_BOOLAND' : OP_BOOLAND,
'OP_BOOLOR' : OP_BOOLOR,
'OP_NUMEQUAL' : OP_NUMEQUAL,
'OP_NUMEQUALVERIFY' : OP_NUMEQUALVERIFY,
'OP_NUMNOTEQUAL' : OP_NUMNOTEQUAL,
'OP_LESSTHAN' : OP_LESSTHAN,
'OP_GREATERTHAN' : OP_GREATERTHAN,
'OP_LESSTHANOREQUAL' : OP_LESSTHANOREQUAL,
'OP_GREATERTHANOREQUAL' : OP_GREATERTHANOREQUAL,
'OP_MIN' : OP_MIN,
'OP_MAX' : OP_MAX,
'OP_WITHIN' : OP_WITHIN,
'OP_RIPEMD160' : OP_RIPEMD160,
'OP_SHA1' : OP_SHA1,
'OP_SHA256' : OP_SHA256,
'OP_HASH160' : OP_HASH160,
'OP_HASH256' : OP_HASH256,
'OP_CODESEPARATOR' : OP_CODESEPARATOR,
'OP_CHECKSIG' : OP_CHECKSIG,
'OP_CHECKSIGVERIFY' : OP_CHECKSIGVERIFY,
'OP_CHECKMULTISIG' : OP_CHECKMULTISIG,
'OP_CHECKMULTISIGVERIFY' : OP_CHECKMULTISIGVERIFY,
'OP_NOP1' : OP_NOP1,
'OP_NOP2' : OP_NOP2,
'OP_NOP3' : OP_NOP3,
'OP_NOP4' : OP_NOP4,
'OP_NOP5' : OP_NOP5,
'OP_NOP6' : OP_NOP6,
'OP_NOP7' : OP_NOP7,
'OP_NOP8' : OP_NOP8,
'OP_NOP9' : OP_NOP9,
'OP_NOP10' : OP_NOP10,
'OP_SMALLINTEGER' : OP_SMALLINTEGER,
'OP_PUBKEYS' : OP_PUBKEYS,
'OP_PUBKEYHASH' : OP_PUBKEYHASH,
'OP_PUBKEY' : OP_PUBKEY,
}
class CScriptInvalidError(Exception):
"""Base class for CScript exceptions"""
pass
class CScriptTruncatedPushDataError(CScriptInvalidError):
"""Invalid pushdata due to truncation"""
def __init__(self, msg, data):
self.data = data
super(CScriptTruncatedPushDataError, self).__init__(msg)
# This is used, e.g., for blockchain heights in coinbase scripts (BIP34)
class CScriptNum(object):
def __init__(self, d=0):
self.value = d
@staticmethod
def encode(obj):
r = bytearray(0)
if obj.value == 0:
return bytes(r)
neg = obj.value < 0
absvalue = -obj.value if neg else obj.value
while (absvalue):
r.append(chr(absvalue & 0xff))
absvalue >>= 8
if r[-1] & 0x80:
r.append(0x80 if neg else 0)
elif neg:
r[-1] |= 0x80
return bytes(bchr(len(r)) + r)
class CScript(bytes):
"""Serialized script
A bytes subclass, so you can use this directly whenever bytes are accepted.
Note that this means that indexing does *not* work as you might expect - you'll
index by byte rather than by opcode. This format was chosen for efficiency so
that the general case would not require creating a lot of little CScriptOp objects.
iter(script) however does iterate by opcode.
"""
@classmethod
def __coerce_instance(cls, other):
# Coerce other into bytes
if isinstance(other, CScriptOp):
other = bchr(other)
elif isinstance(other, CScriptNum):
if (other.value == 0):
other = bchr(CScriptOp(OP_0))
else:
other = CScriptNum.encode(other)
elif isinstance(other, (int, long)):
if 0 <= other <= 16:
other = bytes(bchr(CScriptOp.encode_op_n(other)))
elif other == -1:
other = bytes(bchr(OP_1NEGATE))
else:
- other = CScriptOp.encode_op_pushdata(bignum.bn2vch(other))
+ other = CScriptOp.encode_op_pushdata(bn2vch(other))
elif isinstance(other, (bytes, bytearray)):
other = CScriptOp.encode_op_pushdata(other)
return other
def __add__(self, other):
# Do the coercion outside of the try block so that errors in it are
# noticed.
other = self.__coerce_instance(other)
try:
# bytes.__add__ always returns bytes instances unfortunately
return CScript(super(CScript, self).__add__(other))
except TypeError:
raise TypeError('Can not add a %r instance to a CScript' % other.__class__)
def join(self, iterable):
# join makes no sense for a CScript()
raise NotImplementedError
def __new__(cls, value=b''):
if isinstance(value, bytes) or isinstance(value, bytearray):
return super(CScript, cls).__new__(cls, value)
else:
def coerce_iterable(iterable):
for instance in iterable:
yield cls.__coerce_instance(instance)
# Annoyingly on both python2 and python3 bytes.join() always
# returns a bytes instance even when subclassed.
return super(CScript, cls).__new__(cls, b''.join(coerce_iterable(value)))
def raw_iter(self):
"""Raw iteration
Yields tuples of (opcode, data, sop_idx) so that the different possible
PUSHDATA encodings can be accurately distinguished, as well as
determining the exact opcode byte indexes. (sop_idx)
"""
i = 0
while i < len(self):
sop_idx = i
opcode = bord(self[i])
i += 1
if opcode > OP_PUSHDATA4:
yield (opcode, None, sop_idx)
else:
datasize = None
pushdata_type = None
if opcode < OP_PUSHDATA1:
pushdata_type = 'PUSHDATA(%d)' % opcode
datasize = opcode
elif opcode == OP_PUSHDATA1:
pushdata_type = 'PUSHDATA1'
if i >= len(self):
raise CScriptInvalidError('PUSHDATA1: missing data length')
datasize = bord(self[i])
i += 1
elif opcode == OP_PUSHDATA2:
pushdata_type = 'PUSHDATA2'
if i + 1 >= len(self):
raise CScriptInvalidError('PUSHDATA2: missing data length')
datasize = bord(self[i]) + (bord(self[i+1]) << 8)
i += 2
elif opcode == OP_PUSHDATA4:
pushdata_type = 'PUSHDATA4'
if i + 3 >= len(self):
raise CScriptInvalidError('PUSHDATA4: missing data length')
datasize = bord(self[i]) + (bord(self[i+1]) << 8) + (bord(self[i+2]) << 16) + (bord(self[i+3]) << 24)
i += 4
else:
assert False # shouldn't happen
data = bytes(self[i:i+datasize])
# Check for truncation
if len(data) < datasize:
raise CScriptTruncatedPushDataError('%s: truncated data' % pushdata_type, data)
i += datasize
yield (opcode, data, sop_idx)
def __iter__(self):
"""'Cooked' iteration
Returns either a CScriptOp instance, an integer, or bytes, as
appropriate.
See raw_iter() if you need to distinguish the different possible
PUSHDATA encodings.
"""
for (opcode, data, sop_idx) in self.raw_iter():
if data is not None:
yield data
else:
opcode = CScriptOp(opcode)
if opcode.is_small_int():
yield opcode.decode_op_n()
else:
yield CScriptOp(opcode)
def __repr__(self):
# For Python3 compatibility add b before strings so testcases don't
# need to change
def _repr(o):
if isinstance(o, bytes):
return "x('%s')" % binascii.hexlify(o).decode('utf8')
else:
return repr(o)
ops = []
i = iter(self)
while True:
op = None
try:
op = _repr(next(i))
except CScriptTruncatedPushDataError as err:
op = '%s...<ERROR: %s>' % (_repr(err.data), err)
break
except CScriptInvalidError as err:
op = '<ERROR: %s>' % err
break
except StopIteration:
break
finally:
if op is not None:
ops.append(op)
return "CScript([%s])" % ', '.join(ops)
def GetSigOpCount(self, fAccurate):
"""Get the SigOp count.
fAccurate - Accurately count CHECKMULTISIG, see BIP16 for details.
Note that this is consensus-critical.
"""
n = 0
lastOpcode = OP_INVALIDOPCODE
for (opcode, data, sop_idx) in self.raw_iter():
if opcode in (OP_CHECKSIG, OP_CHECKSIGVERIFY):
n += 1
elif opcode in (OP_CHECKMULTISIG, OP_CHECKMULTISIGVERIFY):
if fAccurate and (OP_1 <= lastOpcode <= OP_16):
n += opcode.decode_op_n()
else:
n += 20
lastOpcode = opcode
return n
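# Usage sketch (not part of the patch): build a pay-to-pubkey style script
# from an iterable and inspect it.  The 33-byte "pubkey" is a placeholder
# value, not a real key.
example_pubkey = b'\x02' + b'\x11' * 32
example_script = CScript([example_pubkey, OP_CHECKSIG])
assert example_script.GetSigOpCount(True) == 1               # one CHECKSIG sigop
assert list(example_script) == [example_pubkey, OP_CHECKSIG] # iterates by opcode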
SIGHASH_ALL = 1
SIGHASH_NONE = 2
SIGHASH_SINGLE = 3
SIGHASH_ANYONECANPAY = 0x80
def FindAndDelete(script, sig):
"""Consensus critical, see FindAndDelete() in Satoshi codebase"""
r = b''
last_sop_idx = sop_idx = 0
skip = True
for (opcode, data, sop_idx) in script.raw_iter():
if not skip:
r += script[last_sop_idx:sop_idx]
last_sop_idx = sop_idx
if script[sop_idx:sop_idx + len(sig)] == sig:
skip = True
else:
skip = False
if not skip:
r += script[last_sop_idx:]
return CScript(r)
def SignatureHash(script, txTo, inIdx, hashtype):
"""Consensus-correct SignatureHash
Returns (hash, err) to precisely match the consensus-critical behavior of
the SIGHASH_SINGLE bug. (inIdx is *not* checked for validity)
"""
HASH_ONE = b'\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
if inIdx >= len(txTo.vin):
return (HASH_ONE, "inIdx %d out of range (%d)" % (inIdx, len(txTo.vin)))
txtmp = CTransaction(txTo)
for txin in txtmp.vin:
txin.scriptSig = b''
txtmp.vin[inIdx].scriptSig = FindAndDelete(script, CScript([OP_CODESEPARATOR]))
if (hashtype & 0x1f) == SIGHASH_NONE:
txtmp.vout = []
for i in range(len(txtmp.vin)):
if i != inIdx:
txtmp.vin[i].nSequence = 0
elif (hashtype & 0x1f) == SIGHASH_SINGLE:
outIdx = inIdx
if outIdx >= len(txtmp.vout):
return (HASH_ONE, "outIdx %d out of range (%d)" % (outIdx, len(txtmp.vout)))
tmp = txtmp.vout[outIdx]
txtmp.vout = []
for i in range(outIdx):
txtmp.vout.append(CTxOut())
txtmp.vout.append(tmp)
for i in range(len(txtmp.vin)):
if i != inIdx:
txtmp.vin[i].nSequence = 0
if hashtype & SIGHASH_ANYONECANPAY:
tmp = txtmp.vin[inIdx]
txtmp.vin = []
txtmp.vin.append(tmp)
s = txtmp.serialize()
s += struct.pack(b"<I", hashtype)
hash = hash256(s)
return (hash, None)
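A sketch (not part of the patch) of signing input 0 of a simple transaction with SignatureHash() and key.py's CECKey; the prevout hash, secret bytes, and output value are placeholders for illustration.
from test_framework.mininode import CTransaction, CTxIn, CTxOut, COutPoint
from test_framework.key import CECKey

key = CECKey()
key.set_secretbytes(b"\x01" * 32)                 # placeholder secret
script_pubkey = CScript([key.get_pubkey(), OP_CHECKSIG])

tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(0xdeadbeef, 0), b"", 0xffffffff))  # placeholder prevout
tx.vout.append(CTxOut(50 * 100000000, CScript([OP_TRUE])))

(sighash, err) = SignatureHash(script_pubkey, tx, 0, SIGHASH_ALL)
assert err is None
tx.vin[0].scriptSig = CScript([key.sign(sighash) + bchr(SIGHASH_ALL)])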
