diff --git a/chronik/chronik-http/src/handlers.rs b/chronik/chronik-http/src/handlers.rs
--- a/chronik/chronik-http/src/handlers.rs
+++ b/chronik/chronik-http/src/handlers.rs
@@ -51,6 +51,18 @@
     Err(Report::from(RouteNotFound(uri)).into())
 }
 
+/// Return a page of the txs of a block.
+pub async fn handle_block_txs(
+    hash_or_height: String,
+    query_params: &HashMap<String, String>,
+    indexer: &ChronikIndexer,
+) -> Result<proto::TxHistoryPage> {
+    let blocks = indexer.blocks();
+    let page_num: u32 = get_param(query_params, "page")?.unwrap_or(0);
+    let page_size: u32 = get_param(query_params, "page_size")?.unwrap_or(25);
+    blocks.block_txs(hash_or_height, page_num as usize, page_size as usize)
+}
+
 /// Return a page of the confirmed txs of the given script.
 /// Scripts are identified by script_type and payload.
 pub async fn handle_script_confirmed_txs(
diff --git a/chronik/chronik-http/src/server.rs b/chronik/chronik-http/src/server.rs
--- a/chronik/chronik-http/src/server.rs
+++ b/chronik/chronik-http/src/server.rs
@@ -113,6 +113,7 @@
     Router::new()
         .route("/blockchain-info", routing::get(handle_blockchain_info))
         .route("/block/:hash_or_height", routing::get(handle_block))
+        .route("/block-txs/:hash_or_height", routing::get(handle_block_txs))
         .route("/blocks/:start/:end", routing::get(handle_block_range))
         .route("/tx/:txid", routing::get(handle_tx))
         .route("/raw-tx/:txid", routing::get(handle_raw_tx))
@@ -164,6 +165,18 @@
     Ok(Protobuf(blocks.by_hash_or_height(hash_or_height)?))
 }
 
+async fn handle_block_txs(
+    Path(hash_or_height): Path<String>,
+    Query(query_params): Query<HashMap<String, String>>,
+    Extension(indexer): Extension<ChronikIndexerRef>,
+) -> Result<Protobuf<proto::TxHistoryPage>, ReportError> {
+    let indexer = indexer.read().await;
+    Ok(Protobuf(
+        handlers::handle_block_txs(hash_or_height, &query_params, &indexer)
+            .await?,
+    ))
+}
+
 async fn handle_tx(
     Path(txid): Path<String>,
     Extension(indexer): Extension<ChronikIndexerRef>,
diff --git a/chronik/chronik-indexer/src/indexer.rs b/chronik/chronik-indexer/src/indexer.rs
--- a/chronik/chronik-indexer/src/indexer.rs
+++ b/chronik/chronik-indexer/src/indexer.rs
@@ -381,6 +381,7 @@
         QueryBlocks {
             db: &self.db,
             avalanche: &self.avalanche,
+            mempool: &self.mempool,
         }
     }
 
diff --git a/chronik/chronik-indexer/src/query/blocks.rs b/chronik/chronik-indexer/src/query/blocks.rs
--- a/chronik/chronik-indexer/src/query/blocks.rs
+++ b/chronik/chronik-indexer/src/query/blocks.rs
@@ -4,18 +4,35 @@
 
 //! Module for [`QueryBlocks`], to query blocks.
 
-use abc_rust_error::Result;
+use abc_rust_error::{Result, WrapErr};
+use bitcoinsuite_core::{
+    block::BlockHash,
+    tx::{Tx, TxId},
+};
+use chronik_bridge::ffi;
 use chronik_db::{
     db::Db,
-    io::{BlockHeight, BlockReader, BlockStats, BlockStatsReader, DbBlock},
+    io::{
+        BlockHeight, BlockReader, BlockStats, BlockStatsReader, DbBlock,
+        SpentByReader, TxNum, TxReader,
+    },
+    mem::Mempool,
 };
 use chronik_proto::proto;
 use thiserror::Error;
 
-use crate::{avalanche::Avalanche, query::HashOrHeight};
+use crate::{
+    avalanche::Avalanche,
+    query::{make_tx_proto, HashOrHeight, OutputsSpent},
+};
 
 const MAX_BLOCKS_PAGE_SIZE: usize = 500;
 
+/// Smallest allowed page size
+pub const MIN_BLOCK_TXS_PAGE_SIZE: usize = 1;
+/// Largest allowed page size
+pub const MAX_BLOCK_TXS_PAGE_SIZE: usize = 200;
+
 /// Struct for querying blocks from the DB.
 #[derive(Debug)]
 pub struct QueryBlocks<'a> {
@@ -23,6 +40,8 @@
     pub db: &'a Db,
     /// Avalanche.
     pub avalanche: &'a Avalanche,
+    /// Mempool
+    pub mempool: &'a Mempool,
 }
 
 /// Errors indicating something went wrong with querying blocks.
@@ -50,6 +69,32 @@
     /// DB is missing block stats
     #[error("500: Inconsistent DB: Missing block stats for height {0}")]
     MissingBlockStats(BlockHeight),
+
+    /// Block has no txs
+    #[error("500: Inconsistent DB: Block {0} has no txs")]
+    BlockHasNoTx(BlockHeight),
+
+    /// Block has tx_num that doesn't exist
+    #[error("500: Inconsistent DB: block {0} has missing tx_num {1}")]
+    BlockHasMissingTx(BlockHash, TxNum),
+
+    /// Can only request page sizes below a certain maximum.
+    #[error(
+        "400: Requested block tx page size {0} is too big, maximum is {}",
+        MAX_BLOCK_TXS_PAGE_SIZE
+    )]
+    RequestPageSizeTooBig(usize),
+
+    /// Can only request page sizes above a certain minimum.
+    #[error(
+        "400: Requested block tx page size {0} is too small, minimum is {}",
+        MIN_BLOCK_TXS_PAGE_SIZE
+    )]
+    RequestPageSizeTooSmall(usize),
+
+    /// Reading failed, likely corrupted block data
+    #[error("500: Reading {0} failed")]
+    ReadFailure(TxId),
 }
 
 use self::QueryBlockError::*;
 
@@ -113,6 +158,74 @@
         Ok(proto::Blocks { blocks })
     }
 
+    /// Query the txs of a block, paginated.
+    pub fn block_txs(
+        &self,
+        hash_or_height: String,
+        request_page_num: usize,
+        request_page_size: usize,
+    ) -> Result<proto::TxHistoryPage> {
+        if request_page_size < MIN_BLOCK_TXS_PAGE_SIZE {
+            return Err(RequestPageSizeTooSmall(request_page_size).into());
+        }
+        if request_page_size > MAX_BLOCK_TXS_PAGE_SIZE {
+            return Err(RequestPageSizeTooBig(request_page_size).into());
+        }
+        let block_reader = BlockReader::new(self.db)?;
+        let tx_reader = TxReader::new(self.db)?;
+        let spent_by_reader = SpentByReader::new(self.db)?;
+        let db_block = match hash_or_height.parse::<HashOrHeight>()? {
+            HashOrHeight::Hash(hash) => block_reader.by_hash(&hash)?,
+            HashOrHeight::Height(height) => block_reader.by_height(height)?,
+        };
+        let db_block = db_block.ok_or(BlockNotFound(hash_or_height))?;
+        let tx_range = tx_reader
+            .block_tx_num_range(db_block.height)?
+            .ok_or(BlockHasNoTx(db_block.height))?;
+        let tx_offset =
+            request_page_num.saturating_mul(request_page_size) as u64;
+        let page_tx_num_start =
+            tx_range.start.saturating_add(tx_offset).min(tx_range.end);
+        let page_tx_num_end = page_tx_num_start
+            .saturating_add(request_page_size as u64)
+            .min(tx_range.end);
+        let num_page_txs = (page_tx_num_end - page_tx_num_start) as usize;
+        let mut txs = Vec::with_capacity(num_page_txs);
+        for tx_num in page_tx_num_start..page_tx_num_end {
+            let db_tx = tx_reader
+                .tx_by_tx_num(tx_num)?
+                .ok_or(BlockHasMissingTx(db_block.hash.clone(), tx_num))?;
+            let tx = ffi::load_tx(
+                db_block.file_num,
+                db_tx.entry.data_pos,
+                db_tx.entry.undo_pos,
+            )
+            .wrap_err(ReadFailure(db_tx.entry.txid))?;
+            let outputs_spent = OutputsSpent::query(
+                &spent_by_reader,
+                &tx_reader,
+                self.mempool.spent_by().outputs_spent(&db_tx.entry.txid),
+                tx_num,
+            )?;
+            txs.push(make_tx_proto(
+                &Tx::from(tx),
+                &outputs_spent,
+                db_tx.entry.time_first_seen,
+                db_tx.entry.is_coinbase,
+                Some(&db_block),
+                self.avalanche,
+            ));
+        }
+        let total_num_txs = (tx_range.end - tx_range.start) as usize;
+        let total_num_pages =
+            (total_num_txs + request_page_size - 1) / request_page_size;
+        Ok(proto::TxHistoryPage {
+            txs,
+            num_pages: total_num_pages as u32,
+            num_txs: total_num_txs as u32,
+        })
+    }
+
     /// Query some info about the blockchain, e.g. the tip hash and height.
     pub fn blockchain_info(&self) -> Result<proto::BlockchainInfo> {
         let block_reader = BlockReader::new(self.db)?;
diff --git a/test/functional/chronik_block_txs.py b/test/functional/chronik_block_txs.py
new file mode 100644
--- /dev/null
+++ b/test/functional/chronik_block_txs.py
@@ -0,0 +1,257 @@
+#!/usr/bin/env python3
+# Copyright (c) 2023 The Bitcoin developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""
+Test Chronik's /block-txs/:hash_or_height endpoint.
+"""
+
+from test_framework.address import (
+    ADDRESS_ECREG_P2SH_OP_TRUE,
+    ADDRESS_ECREG_UNSPENDABLE,
+    P2SH_OP_TRUE,
+    SCRIPTSIG_OP_TRUE,
+)
+from test_framework.blocktools import (
+    GENESIS_BLOCK_HASH,
+    create_block,
+    create_coinbase,
+    make_conform_to_ctor,
+)
+from test_framework.messages import COutPoint, CTransaction, CTxIn, CTxOut
+from test_framework.p2p import P2PDataStore
+from test_framework.script import OP_RETURN, CScript
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal
+
+
+class ChronikBlockTxsTest(BitcoinTestFramework):
+    def set_test_params(self):
+        self.setup_clean_chain = True
+        self.num_nodes = 1
+        self.extra_args = [['-chronik']]
+        self.rpc_timeout = 240
+
+    def skip_test_if_missing_module(self):
+        self.skip_if_no_chronik()
+
+    def run_test(self):
+        from test_framework.chronik.client import ChronikClient, pb
+        from test_framework.chronik.test_data import genesis_cb_tx
+
+        node = self.nodes[0]
+        node.setmocktime(1300000000)
+        chronik = ChronikClient('127.0.0.1', node.chronik_port)
+
+        peer = node.add_p2p_connection(P2PDataStore())
+
+        # Not a valid hash or height
+        assert_equal(chronik.block_txs('1234f').err(400).msg,
+                     '400: Not a hash or height: 1234f')
+        assert_equal(chronik.block_txs('00' * 31).err(400).msg,
+                     f'400: Not a hash or height: {"00"*31}')
+        assert_equal(
+            chronik.block_txs('01').err(400).msg,
+            '400: Not a hash or height: 01')
+        assert_equal(chronik.block_txs('12345678901').err(400).msg,
+                     '400: Not a hash or height: 12345678901')
+
+        assert_equal(
+            chronik.block_txs('00' * 32, page=0, page_size=201).err(400).msg,
+            '400: Requested block tx page size 201 is too big, maximum is 200')
+        assert_equal(
+            chronik.block_txs('00' * 32, page=0, page_size=0).err(400).msg,
+            '400: Requested block tx page size 0 is too small, minimum is 1')
+        assert_equal(
+            chronik.block_txs('00' * 32, page=0, page_size=2**32).err(400).msg,
+            '400: Invalid param page_size: 4294967296, ' +
+            'number too large to fit in target type')
+        assert_equal(
+            chronik.block_txs('00' * 32, page=2**32, page_size=1).err(400).msg,
+            '400: Invalid param page: 4294967296, ' +
+            'number too large to fit in target type')
+
+        assert_equal(
+            chronik.block_txs(GENESIS_BLOCK_HASH, page=2**32 - 1, page_size=200).ok(),
+            pb.TxHistoryPage(txs=[], num_pages=1, num_txs=1),
+        )
+
+        assert_equal(
+            chronik.block_txs(GENESIS_BLOCK_HASH).ok(),
+            pb.TxHistoryPage(
+                txs=[genesis_cb_tx()],
+                num_pages=1,
+                num_txs=1,
+            ),
+        )
+
+        coinblockhash = self.generatetoaddress(node, 1, ADDRESS_ECREG_P2SH_OP_TRUE)[0]
+        coinblock = node.getblock(coinblockhash)
+        cointx = coinblock['tx'][0]
+
+        tip = self.generatetoaddress(node, 100, ADDRESS_ECREG_UNSPENDABLE)[-1]
+
+        coinvalue = 5000000000
+        tx1 = CTransaction()
+        tx1.vin = [CTxIn(outpoint=COutPoint(int(cointx, 16), 0),
+                         scriptSig=SCRIPTSIG_OP_TRUE)]
+        tx1.vout = [
+            CTxOut(coinvalue - 10000, P2SH_OP_TRUE),
+            CTxOut(1000, CScript([OP_RETURN, b'test'])),
+        ]
+        tx1.rehash()
+
+        tx2 = CTransaction()
+        tx2.vin = [CTxIn(outpoint=COutPoint(int(tx1.hash, 16), 0),
+                         scriptSig=SCRIPTSIG_OP_TRUE)]
+        tx2.vout = [
+            CTxOut(3000, CScript([OP_RETURN, b'test'])),
+            CTxOut(coinvalue - 20000, P2SH_OP_TRUE),
+        ]
+        tx2.rehash()
+
+        tx_coinbase = create_coinbase(102, b'\x03' * 33)
+
+        block = create_block(int(tip, 16),
+                             tx_coinbase,
+                             1300000500)
+        block.vtx += [tx1, tx2]
+        make_conform_to_ctor(block)
+        block.hashMerkleRoot = block.calc_merkle_root()
+        block.solve()
+        peer.send_blocks_and_test([block], node)
+
+        block_metadata = pb.BlockMetadata(
+            height=102,
+            hash=bytes.fromhex(block.hash)[::-1],
+            timestamp=1300000500,
+        )
+
+        proto_coinbase_tx = pb.Tx(
+            txid=bytes.fromhex(tx_coinbase.hash)[::-1],
+            version=1,
+            inputs=[
+                pb.TxInput(
+                    prev_out=pb.OutPoint(txid=bytes(32), out_idx=0xffffffff),
+                    input_script=bytes(tx_coinbase.vin[0].scriptSig),
+                    sequence_no=0xffffffff,
+                ),
+            ],
+            outputs=[
+                pb.TxOutput(
+                    value=coinvalue,
+                    output_script=bytes(tx_coinbase.vout[0].scriptPubKey),
+                ),
+                pb.TxOutput(
+                    output_script=bytes(CScript([OP_RETURN])),
+                ),
+            ],
+            lock_time=0,
+            block=block_metadata,
+            size=len(tx_coinbase.serialize()),
+            is_coinbase=True,
+        )
+
+        proto_tx1 = pb.Tx(
+            txid=bytes.fromhex(tx1.hash)[::-1],
+            version=1,
+            inputs=[
+                pb.TxInput(
+                    prev_out=pb.OutPoint(txid=bytes.fromhex(cointx)[::-1], out_idx=0),
+                    input_script=bytes(SCRIPTSIG_OP_TRUE),
+                    output_script=bytes(P2SH_OP_TRUE),
+                    value=coinvalue,
+                    sequence_no=0,
+                ),
+            ],
+            outputs=[
+                pb.TxOutput(
+                    value=coinvalue - 10000,
+                    output_script=bytes(P2SH_OP_TRUE),
+                    spent_by=pb.SpentBy(
+                        txid=bytes.fromhex(tx2.hash)[::-1],
+                        input_idx=0,
+                    ),
+                ),
+                pb.TxOutput(
+                    value=1000,
+                    output_script=bytes(CScript([OP_RETURN, b'test'])),
+                ),
+            ],
+            lock_time=0,
+            size=len(tx1.serialize()),
+            block=block_metadata,
+        )
+
+        proto_tx2 = pb.Tx(
+            txid=bytes.fromhex(tx2.hash)[::-1],
+            version=1,
+            inputs=[
+                pb.TxInput(
+                    prev_out=pb.OutPoint(txid=bytes.fromhex(tx1.hash)[::-1], out_idx=0),
+                    input_script=bytes(SCRIPTSIG_OP_TRUE),
+                    output_script=bytes(P2SH_OP_TRUE),
+                    value=coinvalue - 10000,
+                    sequence_no=0,
+                ),
+            ],
+            outputs=[
+                pb.TxOutput(
+                    value=3000,
+                    output_script=bytes(CScript([OP_RETURN, b'test'])),
+                ),
+                pb.TxOutput(
+                    value=coinvalue - 20000,
+                    output_script=bytes(P2SH_OP_TRUE),
+                ),
+            ],
+            lock_time=0,
+            size=len(tx2.serialize()),
+            block=block_metadata,
+        )
+
+        sorted_tx1, sorted_tx2 = sorted(
+            [proto_tx1, proto_tx2], key=lambda tx: tx.txid[::-1])
+
+        for page, tx in enumerate([proto_coinbase_tx, sorted_tx1, sorted_tx2]):
+            assert_equal(
+                chronik.block_txs(block.hash, page=page, page_size=1).ok(),
+                pb.TxHistoryPage(
+                    txs=[tx],
+                    num_pages=3,
+                    num_txs=3,
+                ),
+            )
+
+        assert_equal(
+            chronik.block_txs(block.hash).ok(),
+            pb.TxHistoryPage(
+                txs=[proto_coinbase_tx, sorted_tx1, sorted_tx2],
+                num_pages=1,
+                num_txs=3,
+            ),
+        )
+
+        assert_equal(
+            chronik.block_txs(block.hash, page=0, page_size=2).ok(),
+            pb.TxHistoryPage(
+                txs=[proto_coinbase_tx, sorted_tx1],
+                num_pages=2,
+                num_txs=3,
+            ),
+        )
+        assert_equal(
+            chronik.block_txs(block.hash, page=1, page_size=2).ok(),
+            pb.TxHistoryPage(
+                txs=[sorted_tx2],
+                num_pages=2,
+                num_txs=3,
+            ),
+        )
+
+        node.invalidateblock(block.hash)
+        chronik.block_txs(block.hash).err(404)
+
+
+if __name__ == '__main__':
+    ChronikBlockTxsTest().main()
diff --git a/test/functional/test_framework/chronik/client.py b/test/functional/test_framework/chronik/client.py
--- a/test/functional/test_framework/chronik/client.py
+++ b/test/functional/test_framework/chronik/client.py
@@ -48,24 +48,14 @@
         self.script_type = script_type
         self.script_payload = script_payload
 
-    def _query_params(self, page=None, page_size=None) -> str:
-        if page is not None and page_size is not None:
-            return f'?page={page}&page_size={page_size}'
-        elif page is not None:
-            return f'?page={page}'
-        elif page_size is not None:
-            return f'?page_size={page_size}'
-        else:
-            return ''
-
     def confirmed_txs(self, page=None, page_size=None):
-        query = self._query_params(page, page_size)
+        query = _page_query_params(page, page_size)
        return self.client._request_get(
             f'/script/{self.script_type}/{self.script_payload}/confirmed-txs{query}',
             pb.TxHistoryPage)
 
     def history(self, page=None, page_size=None):
-        query = self._query_params(page, page_size)
+        query = _page_query_params(page, page_size)
         return self.client._request_get(
             f'/script/{self.script_type}/{self.script_payload}/history{query}',
             pb.TxHistoryPage)
@@ -145,6 +135,12 @@
     def block(self, hash_or_height: Union[str, int]) -> ChronikResponse:
         return self._request_get(f'/block/{hash_or_height}', pb.Block)
 
+    def block_txs(self, hash_or_height: Union[str, int],
+                  page=None, page_size=None) -> ChronikResponse:
+        query = _page_query_params(page, page_size)
+        return self._request_get(
+            f'/block-txs/{hash_or_height}{query}', pb.TxHistoryPage)
+
     def blocks(self, start_height: int, end_height: int) -> ChronikResponse:
         return self._request_get(f'/blocks/{start_height}/{end_height}',
                                  pb.Blocks)
@@ -161,3 +157,14 @@
         ws = websocket.WebSocket()
         ws.connect(f'ws://{self.host}:{self.port}/ws', timeout=timeout)
         return ChronikWs(ws)
+
+
+def _page_query_params(page=None, page_size=None) -> str:
+    if page is not None and page_size is not None:
+        return f'?page={page}&page_size={page_size}'
+    elif page is not None:
+        return f'?page={page}'
+    elif page_size is not None:
+        return f'?page_size={page_size}'
+    else:
+        return ''