diff --git a/Cargo.toml b/Cargo.toml
index ac859662c..a690b1f47 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,21 +1,21 @@
 # Copyright (c) 2022 The Bitcoin developers
 
 [workspace]
 
 members = [
     "chronik/abc-rust-error",
     "chronik/abc-rust-lint",
     "chronik/bitcoinsuite-core",
     "chronik/bitcoinsuite-slp",
     "chronik/chronik-bridge",
     "chronik/chronik-db",
     "chronik/chronik-http",
     "chronik/chronik-indexer",
     "chronik/chronik-lib",
     "chronik/chronik-plugin",
     "chronik/chronik-proto",
     "chronik/chronik-util",
 ]
 
 [workspace.package]
-rust-version = "1.72.0"
+rust-version = "1.76.0"
diff --git a/chronik/CMakeLists.txt b/chronik/CMakeLists.txt
index dd4d3de01..83fd25afc 100644
--- a/chronik/CMakeLists.txt
+++ b/chronik/CMakeLists.txt
@@ -1,210 +1,211 @@
 # Copyright (c) 2022 The Bitcoin developers
 
 set(CMAKE_CXX_STANDARD 17)
 set(CMAKE_CXX_STANDARD_REQUIRED ON)
 
 include(FetchContent)
 FetchContent_Declare(
     Corrosion
     GIT_REPOSITORY https://github.com/corrosion-rs/corrosion.git
     GIT_TAG v0.4.7
 )
 FetchContent_MakeAvailable(Corrosion)
 
-set(REQUIRED_RUST_VERSION "1.72.0")
+set(REQUIRED_RUST_VERSION "1.76.0")
 if(Rust_VERSION VERSION_LESS REQUIRED_RUST_VERSION)
     message(FATAL_ERROR "Minimum required Rust version is "
-            "${REQUIRED_RUST_VERSION}, but found ${Rust_VERSION}")
+            "${REQUIRED_RUST_VERSION}, but found ${Rust_VERSION}. "
+            "Use `rustup update stable` to update.")
 endif()
 
 set(CARGO_BUILD_DIR "${CMAKE_BINARY_DIR}/cargo/build")
 set_property(DIRECTORY "${CMAKE_SOURCE_DIR}"
     APPEND PROPERTY
     ADDITIONAL_CLEAN_FILES "${CARGO_BUILD_DIR}"
 )
 
 get_property(
     RUSTC_EXECUTABLE
     TARGET Rust::Rustc PROPERTY IMPORTED_LOCATION
 )
 get_filename_component(RUST_BIN_DIR ${RUSTC_EXECUTABLE} DIRECTORY)
 include(DoOrFail)
 find_program_or_fail(RUSTDOC_EXECUTABLE rustdoc
     PATHS "${RUST_BIN_DIR}"
 )
 
 set(CHRONIK_CARGO_FLAGS --locked)
 
 if(BUILD_BITCOIN_CHRONIK_PLUGINS)
     set(CHRONIK_FEATURE_FLAGS --features plugins)
 endif()
 
 function(add_cargo_custom_target TARGET)
     add_custom_target(${TARGET}
     COMMAND
         "${CMAKE_COMMAND}"
         -E env
             CARGO_TARGET_DIR="${CARGO_BUILD_DIR}"
             CARGO_BUILD_RUSTC="$<TARGET_FILE:Rust::Rustc>"
             CARGO_BUILD_RUSTDOC="${RUSTDOC_EXECUTABLE}"
         "$<TARGET_FILE:Rust::Cargo>"
         ${CHRONIK_CARGO_FLAGS}
         ${ARGN}
     WORKING_DIRECTORY
         "${CMAKE_SOURCE_DIR}"
     )
 endfunction()
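 # For illustration (hypothetical target name):
 #   add_cargo_custom_target(cargo-doc doc --package "chronik-*")
 # would run `cargo --locked doc --package chronik-*` from the source tree
 # with the CARGO_* environment configured above.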
 
 function(add_crate_test_targets CRATE)
     set(CRATE_TEST_TARGET "check-crate-${CRATE}")
     add_custom_target("${CRATE_TEST_TARGET}")
 
     set(CLIPPY_TARGET "${CRATE_TEST_TARGET}-clippy")
     add_cargo_custom_target("${CLIPPY_TARGET}"
         clippy
         --package "${CRATE}-*"
         -- -D warnings
     )
 
     set(TEST_TARGET "${CRATE_TEST_TARGET}-test")
     add_cargo_custom_target("${TEST_TARGET}"
         test
         --package "${CRATE}-*"
     )
 
     add_dependencies("${CRATE_TEST_TARGET}"
         "${CLIPPY_TARGET}"
         "${TEST_TARGET}"
     )
 
     add_dependencies("check-crates"
         "${CRATE_TEST_TARGET}"
     )
 endfunction()
 
 add_custom_target("check-crates")
 add_crate_test_targets(abc-rust)
 add_crate_test_targets(bitcoinsuite)
 add_crate_test_targets(chronik)
 
 # Compile the Rust crates; this generates chronik-lib
 corrosion_import_crate(
     MANIFEST_PATH "chronik-lib/Cargo.toml"
     FLAGS
         ${CHRONIK_CARGO_FLAGS}
         ${CHRONIK_FEATURE_FLAGS}
 )
 
 set(Rust_TRIPLE
     "${Rust_CARGO_TARGET_ARCH}"
     "${Rust_CARGO_TARGET_VENDOR}"
     "${Rust_CARGO_TARGET_OS}"
 )
 if (Rust_CARGO_TARGET_ENV)
     list(APPEND Rust_TRIPLE "${Rust_CARGO_TARGET_ENV}")
 endif()
 list(JOIN Rust_TRIPLE "-" Rust_CARGO_TARGET)
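 # On a typical Linux host this yields e.g. "x86_64-unknown-linux-gnu".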
 
 # cxx crate generates some source files at this location
 set(CXXBRIDGE_GENERATED_FOLDER
     "${CARGO_BUILD_DIR}/${Rust_CARGO_TARGET}/cxxbridge")
 set(CHRONIK_BRIDGE_GENERATED_CPP_FILES
     "${CXXBRIDGE_GENERATED_FOLDER}/chronik-bridge/src/ffi.rs.cc")
 set(CHRONIK_LIB_GENERATED_CPP_FILES
     "${CXXBRIDGE_GENERATED_FOLDER}/chronik-lib/src/ffi.rs.cc")
 
 add_custom_command(
     OUTPUT
         ${CHRONIK_BRIDGE_GENERATED_CPP_FILES}
         ${CHRONIK_LIB_GENERATED_CPP_FILES}
     COMMAND
         "${CMAKE_COMMAND}"
         -E env
         "echo" "Generating cxx bridge files"
     DEPENDS $<TARGET_PROPERTY:chronik-lib,INTERFACE_LINK_LIBRARIES>
 )
 
 # Chronik-bridge library
 # Contains all the C++ functions used by Rust, and the code bridging both
 add_library(chronik-bridge
     chronik-cpp/chronik_bridge.cpp
     chronik-cpp/util/hash.cpp
     ${CHRONIK_BRIDGE_GENERATED_CPP_FILES}
 )
 target_include_directories(chronik-bridge
     PUBLIC
         "${CMAKE_CURRENT_SOURCE_DIR}"
         "${CXXBRIDGE_GENERATED_FOLDER}"
 )
 target_link_libraries(chronik-bridge
     util
     leveldb
 )
 
 # Chronik library
 # Compiles and links all the Chronik code, and exposes chronik::Start and
 # chronik::Stop to run the indexer from C++.
 add_library(chronik
     chronik-cpp/chronik.cpp
     chronik-cpp/chronik_validationinterface.cpp
     ${CHRONIK_LIB_GENERATED_CPP_FILES}
 )
 target_link_libraries(chronik
     chronik-bridge
     chronik-lib
 )
 
 # Plugins require us to link against libpython
 if(BUILD_BITCOIN_CHRONIK_PLUGINS)
     find_package(Python COMPONENTS Interpreter Development)
     message("Adding Python_LIBRARIES: ${Python_LIBRARIES}")
     target_link_libraries(chronik ${Python_LIBRARIES})
 endif()
 
 if(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
     # mio crate (dependency of tokio) requires winternl.h, found in ntdll
     find_package(NTDLL REQUIRED)
     target_link_libraries(chronik NTDLL::ntdll)
 
     # rocksdb requires items from rpcdce.h, found in rpcrt4
     find_package(RPCRT4 REQUIRED)
     target_link_libraries(chronik RPCRT4::rpcrt4)
 endif()
 
 # Rocksdb requires "atomic"
 include(AddCompilerFlags)
 custom_check_linker_flag(LINKER_HAS_ATOMIC "-latomic")
 if(LINKER_HAS_ATOMIC)
     target_link_libraries(chronik atomic)
 endif()
 
 # Add chronik to server
 target_link_libraries(server
     chronik
     # TODO: We need to add the library again, otherwise gcc linking fails.
     # It's not clear yet why this is the case.
     chronik-bridge
 )
 
 # Install the directory containing the proto files. The trailing slash ensures
 # the directory is not duplicated (see
 # https://cmake.org/cmake/help/v3.16/command/install.html#installing-directories)
 set(CHRONIK_PROTO_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/chronik-proto/proto/")
 set(CHRONIK_PROTO_COMPONENT "chronik-proto")
 install(
     DIRECTORY "${CHRONIK_PROTO_DIRECTORY}"
     DESTINATION "proto"
     COMPONENT "${CHRONIK_PROTO_COMPONENT}"
 )
 
 add_custom_target("install-${CHRONIK_PROTO_COMPONENT}"
     COMMENT "Installing component ${CHRONIK_PROTO_COMPONENT}"
     COMMAND
         "${CMAKE_COMMAND}"
         -E env CMAKE_INSTALL_ALWAYS=ON
         "${CMAKE_COMMAND}"
         -DCOMPONENT="${CHRONIK_PROTO_COMPONENT}"
         -DCMAKE_INSTALL_PREFIX="${CMAKE_INSTALL_PREFIX}"
         -P cmake_install.cmake
     WORKING_DIRECTORY "${CMAKE_BINARY_DIR}"
 )
diff --git a/chronik/bitcoinsuite-slp/src/color.rs b/chronik/bitcoinsuite-slp/src/color.rs
index a2c9b3a78..67fe44478 100644
--- a/chronik/bitcoinsuite-slp/src/color.rs
+++ b/chronik/bitcoinsuite-slp/src/color.rs
@@ -1,596 +1,596 @@
 // Copyright (c) 2023 The Bitcoin developers
 // Distributed under the MIT software license, see the accompanying
 // file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 //! Module for [`ColoredTx`].
 
 use bitcoinsuite_core::tx::{Tx, TxId};
 use bytes::Bytes;
 use thiserror::Error;
 
 use crate::{
     alp,
     color::ColorError::*,
     empp,
     parsed::{ParsedData, ParsedGenesis, ParsedMintData, ParsedTxType},
     slp,
     structs::{
         Amount, GenesisInfo, Token, TokenMeta, TokenOutput, TokenVariant,
         TxType,
     },
     token_id::TokenId,
     token_type::{SlpTokenType, TokenType},
 };
 
 /// A parsed SLP or ALP tx with outputs colored according to the tokens
 /// specified in the `OP_RETURN`.
 #[derive(Clone, Debug, Default, PartialEq)]
 pub struct ColoredTx {
     /// Parsed sections defining where tokens should go.
     /// Can be multiple for ALP, at most 1 for SLP.
     pub sections: Vec<ColoredTxSection>,
     /// Intentional burns, specifying how many tokens are supposed to be burned
     /// of which type.
     pub intentional_burns: Vec<IntentionalBurn>,
     /// Outputs colored with the tokens as specified in the `OP_RETURN`.
     pub outputs: Vec<Option<TokenOutput>>,
     /// Reports of failed parsing attempts
     pub failed_parsings: Vec<FailedParsing>,
     /// Reports of failed coloring attempts
     pub failed_colorings: Vec<FailedColoring>,
 }
 
 /// Section defining where tokens should go as specified in the `OP_RETURN`.
 #[derive(Clone, Debug, Eq, PartialEq)]
 pub struct ColoredTxSection {
     /// [`TokenMeta`] specified in the section.
     pub meta: TokenMeta,
     /// [`TxType`] specified in the section.
     pub tx_type: TxType,
     /// Minimum required sum of input tokens of this token meta.
     /// Note that this may be different from the `outputs` in [`ColoredTx`] of
     /// the respective token meta for SLP, as SLP allows sending tokens to
     /// nonexistent outputs, whereas in ALP this would be a failed coloring.
     pub required_input_sum: u128,
     /// SLP allows coloring non-existent outputs, but this usually is a
     /// mistake, so we keep track of it here.
     pub has_colored_out_of_range: bool,
     /// [`GenesisInfo`] introduced in this section. Only present for GENESIS.
     pub genesis_info: Option<GenesisInfo>,
 }
 
 /// Any kind of parse error that can occur when processing a tx.
 #[derive(Clone, Debug, Eq, PartialEq)]
 pub enum ParseError {
     /// Parsing the OP_RETURN as eMPP failed
     Empp(empp::ParseError),
     /// Parsing the OP_RETURN as SLP failed
     Slp(slp::ParseError),
     /// Parsing a pushdata in an OP_RETURN as ALP failed
     Alp(alp::ParseError),
 }
 
 /// Report of a failed parsing attempt
 #[derive(Clone, Debug, Eq, PartialEq)]
 pub struct FailedParsing {
     /// Which pushdata in the OP_RETURN failed to parse, or None if the entire
     /// OP_RETURN is the culprit.
     pub pushdata_idx: Option<usize>,
     /// The actual bytes that failed to parse.
     pub bytes: Bytes,
     /// Error explaining why the parsing failed.
     pub error: ParseError,
 }
 
 /// Report of a failed coloring attempt
 #[derive(Clone, Debug, Eq, PartialEq)]
 pub struct FailedColoring {
     /// Which pushdata in the OP_RETURN failed to color the tx.
     pub pushdata_idx: usize,
     /// Parsed data which failed to color.
     pub parsed: ParsedData,
     /// Error explaining why the coloring failed.
     pub error: ColorError,
 }
 
 /// Intentional burn, allowing users to specify cleanly and precisely how tokens
 /// should be removed from supply.
 ///
 /// This prevents indexers and wallets from rejecting a tx whose intent is
 /// to remove tokens from supply.
 #[derive(Clone, Debug, Eq, Hash, PartialEq)]
 pub struct IntentionalBurn {
     /// Which token meta should be burned
     pub meta: TokenMeta,
     /// How many tokens should be burned
     pub amount: Amount,
 }
 
 /// Error when trying to color a parsed section.
 #[derive(Clone, Debug, Error, Eq, PartialEq)]
 pub enum ColorError {
     /// ALP disallows coloring non-existent outputs
     #[error("Too few outputs, expected {expected} but got {actual}")]
     TooFewOutputs {
         /// Expected number of outputs for the coloring to succeed
         expected: usize,
         /// Actual number of outputs
         actual: usize,
     },
 
     /// GENESIS must be first
     #[error("GENESIS must be the first pushdata")]
     GenesisMustBeFirst,
 
     /// Token types must be ascending, to allow clean upgrades when introducing
     /// new token types.
     #[error(
         "Descending token type: {before} > {after}, token types must be in \
          ascending order"
     )]
     DescendingTokenType {
         /// Larger token type coming before
         before: u8,
         /// Smaller token type coming after
         after: u8,
     },
 
     /// Tried coloring using the same token ID twice, which is not allowed.
     /// Only the first coloring counts.
     #[error(
         "Duplicate token_id {token_id}, found in section {prev_section_idx}"
     )]
     DuplicateTokenId {
         /// Valid section that first colored the tx
         prev_section_idx: usize,
         /// Token ID that was colored twice
         token_id: TokenId,
     },
 
     /// Tried doing an intentional burn of the same token ID twice, which is
     /// not allowed. Only the first intentional burn counts.
     #[error(
         "Duplicate intentional burn token_id {token_id}, found in burn \
          #{prev_burn_idx} and #{burn_idx}"
     )]
     DuplicateIntentionalBurnTokenId {
         /// Valid previous intentional burn
         prev_burn_idx: usize,
         /// Invalid later duplicate burn
         burn_idx: usize,
         /// Token ID burned
         token_id: TokenId,
     },
 
     /// Outputs cannot be colored twice by different sections
     #[error(
         "Overlapping amount when trying to color {amount} at index \
          {output_idx}, output is already colored with {prev_token}"
     )]
     OverlappingAmount {
         /// Previous token the output is already colored with
         prev_token: Token,
         /// Index of the output that we tried to color twice
         output_idx: usize,
         /// Amount that tried to color an output twice
         amount: Amount,
     },
 
     /// Outputs cannot be colored twice by different sections
     #[error(
         "Overlapping mint baton when trying to color mint baton at index \
          {output_idx}, output is already colored with {prev_token}"
     )]
     OverlappingMintBaton {
         /// Previous token the output is already colored with
         prev_token: Token,
         /// Index of the output that we tried to color twice
         output_idx: usize,
     },
 }
 
 impl ColoredTx {
     /// Parse the OP_RETURN of the tx and color its outputs
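     ///
     /// A minimal usage sketch (hypothetical variables, setup elided):
     /// ```ignore
     /// if let Some(colored) = ColoredTx::color_tx(&tx) {
     ///     // One slot per tx output; None means "no token"
     ///     assert_eq!(colored.outputs.len(), tx.outputs.len());
     /// }
     /// ```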
     pub fn color_tx(tx: &Tx) -> Option<ColoredTx> {
-        let op_return = match tx.outputs.get(0) {
+        let op_return = match tx.outputs.first() {
             Some(output) if output.script.is_opreturn() => &output.script,
             _ => return None,
         };
         let mut colored = ColoredTx {
             sections: vec![],
             intentional_burns: vec![],
             outputs: vec![None; tx.outputs.len()],
             failed_colorings: vec![],
             failed_parsings: vec![],
         };
 
         // First, try to parse and color as SLP tx
         match slp::parse(tx.txid_ref(), op_return) {
             Ok(Some(parsed)) => {
                 colored
                     .color_section(0, parsed, &mut 0)
                     .expect("Coloring SLP always succeeds");
                 return Some(colored);
             }
             Ok(None) => {}
             Err(slp_error) => {
                 colored.failed_parsings.push(FailedParsing {
                     pushdata_idx: None,
                     bytes: op_return.bytecode().clone(),
                     error: ParseError::Slp(slp_error),
                 });
                 return Some(colored);
             }
         }
 
         // Then, try to parse as eMPP tx
         let pushdata = match empp::parse(op_return) {
             Ok(Some(pushdata)) => pushdata,
             Ok(None) => return None,
             Err(empp_err) => {
                 colored.failed_parsings.push(FailedParsing {
                     pushdata_idx: None,
                     bytes: op_return.bytecode().clone(),
                     error: ParseError::Empp(empp_err),
                 });
                 return Some(colored);
             }
         };
 
         // Color all the pushdata as ALP, and if we encountered any ALP
         // sections, return the colored tx.
         if colored.color_all_alp_pushdata(pushdata, tx.txid_ref()) {
             return Some(colored);
         }
 
         // We found an eMPP OP_RETURN but no ALP sections
         None
     }
 
     // Color all the pushdata as ALP one-by-one and return whether we
     // encountered anything ALP-like.
     fn color_all_alp_pushdata(
         &mut self,
         pushdata: Vec<Bytes>,
         txid: &TxId,
     ) -> bool {
         let mut max_token_type = 0;
         let mut has_any_alp = false;
 
         for (pushdata_idx, pushdata) in pushdata.into_iter().enumerate() {
             let parsed = match alp::parse_section(txid, pushdata.clone()) {
                 Ok(Some(parsed)) => parsed,
                 Ok(None) => continue,
                 Err(alp_error) => {
                     self.failed_parsings.push(FailedParsing {
                         pushdata_idx: Some(pushdata_idx),
                         bytes: pushdata,
                         error: ParseError::Alp(alp_error),
                     });
                     has_any_alp = true;
                     continue;
                 }
             };
 
             has_any_alp = true;
             let color_result = self.color_section(
                 pushdata_idx,
                 parsed.clone(),
                 &mut max_token_type,
             );
             if let Err(error) = color_result {
                 self.failed_colorings.push(FailedColoring {
                     pushdata_idx,
                     parsed,
                     error,
                 });
             }
         }
 
         has_any_alp
     }
 
     fn color_section(
         &mut self,
         pushdata_idx: usize,
         parsed: ParsedData,
         max_token_type: &mut u8,
     ) -> Result<(), ColorError> {
         let meta = parsed.meta;
 
         // token_type must be in ascending order
         if *max_token_type > meta.token_type.to_u8() {
             return Err(DescendingTokenType {
                 before: *max_token_type,
                 after: meta.token_type.to_u8(),
             });
         }
         *max_token_type = meta.token_type.to_u8();
 
        // Only report duplicate token IDs on MINT and SEND; BURN and GENESIS
        // are handled separately
         if matches!(parsed.tx_type.tx_type(), TxType::MINT | TxType::SEND) {
             for (prev_section_idx, prev_section) in
                 self.sections.iter().enumerate()
             {
                 if prev_section.meta.token_id == meta.token_id {
                     return Err(DuplicateTokenId {
                         prev_section_idx,
                         token_id: meta.token_id,
                     });
                 }
             }
         }
 
         match parsed.tx_type {
             ParsedTxType::Genesis(genesis) => {
                 self.color_genesis(pushdata_idx, meta, genesis)
             }
             ParsedTxType::Mint(mint) => self.color_mint(meta, mint),
             ParsedTxType::Send(send) => self.color_send(meta, send),
             ParsedTxType::Burn(amount) => self.color_burn(meta, amount),
             ParsedTxType::Unknown => self.color_unknown(meta),
         }
     }
 
     fn color_genesis(
         &mut self,
         pushdata_idx: usize,
         meta: TokenMeta,
         genesis: ParsedGenesis,
     ) -> Result<(), ColorError> {
         // GENESIS must be the very first section in the pushdata.
         // This prevents assigning the same token ID to different tokens, even
         // if we introduced a new LOKAD ID, as long as it also upholds this
         // rule.
         if pushdata_idx != 0 {
             return Err(GenesisMustBeFirst);
         }
 
         let has_colored_out_of_range =
             self.color_mint_data(&meta, &genesis.mint_data)?;
         self.sections.push(ColoredTxSection {
             meta,
             tx_type: TxType::GENESIS,
             required_input_sum: 0,
             genesis_info: Some(genesis.info),
             has_colored_out_of_range,
         });
         Ok(())
     }
 
     fn color_mint(
         &mut self,
         meta: TokenMeta,
         mint: ParsedMintData,
     ) -> Result<(), ColorError> {
         let has_colored_out_of_range = self.color_mint_data(&meta, &mint)?;
         self.sections.push(ColoredTxSection {
             meta,
             tx_type: TxType::MINT,
             required_input_sum: 0,
             genesis_info: None,
             has_colored_out_of_range,
         });
         Ok(())
     }
 
     fn color_mint_data(
         &mut self,
         meta: &TokenMeta,
         mint_data: &ParsedMintData,
     ) -> Result<bool, ColorError> {
         let token_idx = self.sections.len();
 
         let mut out_of_range_idx = None;
         // Verify no outputs have been colored already
         for (output_idx, &amount) in
             mint_data.amounts_range().zip(&mint_data.amounts)
         {
             if amount != 0 {
                 match self.outputs.get(output_idx) {
                     Some(Some(token)) => {
                         return Err(OverlappingAmount {
                             prev_token: self.token(token),
                             output_idx,
                             amount,
                         });
                     }
                     Some(None) => {}
                     None => out_of_range_idx = Some(output_idx),
                 }
             }
         }
         for output_idx in mint_data.batons_range() {
             match self.outputs.get(output_idx) {
                 Some(Some(token)) => {
                     return Err(OverlappingMintBaton {
                         prev_token: self.token(token),
                         output_idx,
                     })
                 }
                 Some(None) => {}
                 None => out_of_range_idx = Some(output_idx),
             }
         }
 
         if let Some(output_idx) = out_of_range_idx {
             // ALP forbids amounts and batons for nonexistent outputs
             if meta.token_type.is_alp() {
                 return Err(TooFewOutputs {
                     expected: output_idx + 1,
                     actual: self.outputs.len(),
                 });
             }
         }
 
         // Now, color all outputs
         for (output_idx, &amount) in
             mint_data.amounts_range().zip(&mint_data.amounts)
         {
             if output_idx >= self.outputs.len() {
                 break;
             }
             if amount > 0 {
                 self.outputs[output_idx] = Some(TokenOutput {
                     token_idx,
                     variant: TokenVariant::Amount(amount),
                 });
             }
         }
         for output_idx in mint_data.batons_range() {
             if output_idx >= self.outputs.len() {
                 break;
             }
             self.outputs[output_idx] = Some(TokenOutput {
                 token_idx,
                 variant: TokenVariant::MintBaton,
             });
         }
 
         Ok(out_of_range_idx.is_some())
     }
 
     fn color_send(
         &mut self,
         meta: TokenMeta,
         amounts: Vec<Amount>,
     ) -> Result<(), ColorError> {
         // Verify no outputs have been colored already
         let mut out_of_range_idx = None;
         for (idx, &amount) in amounts.iter().enumerate() {
             if amount != 0 {
                 match self.outputs.get(idx + 1) {
                     Some(Some(token)) => {
                         return Err(OverlappingAmount {
                             prev_token: self.token(token),
                             output_idx: idx + 1,
                             amount,
                         })
                     }
                     Some(None) => {}
                     None => out_of_range_idx = Some(idx + 1),
                 }
             }
         }
 
         if let Some(output_idx) = out_of_range_idx {
             // ALP forbids amounts and batons for nonexistent outputs
             if meta.token_type.is_alp() {
                 return Err(TooFewOutputs {
                     expected: output_idx + 1,
                     actual: self.outputs.len(),
                 });
             }
         }
 
         // Color outputs and also calculate the required input sum
         let mut required_input_sum = 0u128;
         for (idx, &amount) in amounts.iter().enumerate() {
             if amount == 0 {
                 continue;
             }
             required_input_sum += u128::from(amount);
             if let Some(output) = self.outputs.get_mut(idx + 1) {
                 *output = Some(TokenOutput {
                     token_idx: self.sections.len(),
                     variant: TokenVariant::Amount(amount),
                 });
             }
         }
 
         self.sections.push(ColoredTxSection {
             meta,
             tx_type: TxType::SEND,
             required_input_sum,
             genesis_info: None,
             has_colored_out_of_range: out_of_range_idx.is_some(),
         });
         Ok(())
     }
 
     fn color_burn(
         &mut self,
         meta: TokenMeta,
         amount: Amount,
     ) -> Result<(), ColorError> {
         for (prev_burn_idx, prev_burn) in
             self.intentional_burns.iter().enumerate()
         {
             if prev_burn.meta.token_id == meta.token_id {
                 return Err(DuplicateIntentionalBurnTokenId {
                     prev_burn_idx,
                     burn_idx: self.intentional_burns.len(),
                     token_id: meta.token_id,
                 });
             }
         }
         self.intentional_burns
             .push(IntentionalBurn { meta, amount });
         Ok(())
     }
 
     fn color_unknown(&mut self, meta: TokenMeta) -> Result<(), ColorError> {
         // Color all outputs (except the OP_RETURN) that haven't been colored
         // yet as "unknown"
         for token_data in self.outputs.iter_mut().skip(1) {
             if token_data.is_none() {
                 *token_data = Some(TokenOutput {
                     token_idx: self.sections.len(),
                     variant: TokenVariant::Unknown(meta.token_type.to_u8()),
                 });
             }
         }
         self.sections.push(ColoredTxSection {
             meta,
             tx_type: TxType::UNKNOWN,
             required_input_sum: 0,
             genesis_info: None,
             has_colored_out_of_range: false,
         });
         Ok(())
     }
 
     /// Turn a [`TokenOutput`] of this [`ColoredTx`] into a [`Token`].
     pub fn token(&self, token_output: &TokenOutput) -> Token {
         let section = &self.sections[token_output.token_idx];
         Token {
             meta: section.meta,
             variant: token_output.variant,
         }
     }
 }
 
 impl ColoredTxSection {
     /// Whether the section has SLP V2 (MintVault) token type and a MINT tx
     /// type.
     pub fn is_mint_vault_mint(&self) -> bool {
         if self.tx_type != TxType::MINT {
             return false;
         }
         matches!(
             self.meta.token_type,
             TokenType::Slp(SlpTokenType::MintVault),
         )
     }
 }
 
 impl std::fmt::Display for FailedParsing {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         write!(f, "Parsing failed")?;
         if let Some(pushdata_idx) = self.pushdata_idx {
             write!(f, " at pushdata idx {pushdata_idx}")?;
         }
         write!(f, ": {}", self.error)
     }
 }
 
 impl std::fmt::Display for ParseError {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match &self {
             ParseError::Empp(err) => write!(f, "eMPP error: {err}"),
             ParseError::Slp(err) => write!(f, "SLP error: {err}"),
             ParseError::Alp(err) => write!(f, "ALP error: {err}"),
         }
     }
 }
diff --git a/chronik/bitcoinsuite-slp/src/verify.rs b/chronik/bitcoinsuite-slp/src/verify.rs
index 9f98fe19b..d05cc5dd2 100644
--- a/chronik/bitcoinsuite-slp/src/verify.rs
+++ b/chronik/bitcoinsuite-slp/src/verify.rs
@@ -1,451 +1,451 @@
 // Copyright (c) 2023 The Bitcoin developers
 // Distributed under the MIT software license, see the accompanying
 // file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 //! Module for [`VerifyContext`].
 
 use std::collections::BTreeMap;
 
 use bitcoinsuite_core::script::Script;
 use serde::{Deserialize, Serialize};
 use thiserror::Error;
 
 use crate::{
     alp::consts::MAX_TX_INPUTS,
     color::{ColoredTx, ColoredTxSection},
     parsed::ParsedTxType,
     structs::{GenesisInfo, Token, TokenMeta, TokenVariant, TxType},
     token_tx::{TokenTx, TokenTxEntry},
     token_type::{SlpTokenType, TokenType},
     verify::BurnError::*,
 };
 
 /// Error describing how token verification based on the inputs failed
 #[derive(Clone, Debug, Deserialize, Eq, Error, Hash, PartialEq, Serialize)]
 pub enum BurnError {
     /// ALP has an upper limit on the number of tx inputs.
     /// Note that given current consensus rules, having this many inputs is not
     /// possible, so this is a failsafe.
     #[error("Too many tx inputs, got {0} but only {} allowed", MAX_TX_INPUTS)]
     TooManyTxInputs(usize),
 
     /// NFT1 CHILD GENESIS requires an NFT1 GROUP token in the first input
     #[error("Invalid NFT1 CHILD GENESIS: No GROUP token")]
     MissingNft1Group,
 
     /// MINT requires a mint baton in the inputs
     #[error("Missing MINT baton")]
     MissingMintBaton,
 
     /// MINT requires a mint vault input
     #[error("Missing MINT vault")]
     MissingMintVault,
 
     /// SEND transfers cannot have more tokens in the outputs than are supplied
     /// in the inputs.
     #[error("Insufficient token input output sum: {actual} < {required}")]
     InsufficientInputSum {
         /// Required minimum inputs as specified in the outputs
         required: u128,
         /// Actual supplied token amount
         actual: u128,
     },
 }
 
 /// Token spent as an input
 #[derive(Clone, Debug, Eq, PartialEq)]
 pub struct SpentToken {
     /// Input token
     pub token: Token,
     /// GROUP token ID and type of the input, if any
     pub group_token_meta: Option<TokenMeta>,
 }
 
 /// Context under which to verify a [`ColoredTx`].
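 ///
 /// A minimal sketch of the intended flow (hypothetical variables; assumes
 /// no SLP V2 Mint Vault MINT, so `spent_scripts` may be `None`):
 /// ```ignore
 /// let context = VerifyContext {
 ///     spent_tokens: &spent_tokens,
 ///     spent_scripts: None,
 ///     genesis_info: None,
 ///     override_has_mint_vault: None,
 /// };
 /// let token_tx = context.verify(colored_tx);
 /// ```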
 #[derive(Debug)]
 pub struct VerifyContext<'a> {
     /// Input tokens of the tx
     pub spent_tokens: &'a [Option<SpentToken>],
     /// scriptPubKeys of the inputs of the tx, required only for SLP V2 MINT
     /// txs
     pub spent_scripts: Option<&'a [Script]>,
     /// [`GenesisInfo`] of the tx's token ID, required only for SLP V2 MINT txs
     pub genesis_info: Option<&'a GenesisInfo>,
     /// Override whether a mint vault input is present by setting this to
     /// `true`.
     pub override_has_mint_vault: Option<bool>,
 }
 
 struct BareBurn {
     burn_amount: u128,
     burns_mint_batons: bool,
     group_token_meta: Option<TokenMeta>,
     is_invalid: bool,
 }
 
 impl VerifyContext<'_> {
     /// Verify the [`ColoredTx`] under the given context and return a verified
     /// [`TokenTx`].
     pub fn verify(&self, tx: ColoredTx) -> TokenTx {
         let mut entries = Vec::new();
         for section in &tx.sections {
             entries.push(self.verify_section(&tx, section));
         }
 
         // Add entries for standalone intentional burns without any
         // corresponding existing sections
         for intentional_burn in &tx.intentional_burns {
             if !entries.iter().any(|burn| {
                 burn.meta.token_id == intentional_burn.meta.token_id
             }) {
                 entries.push(TokenTxEntry {
                     meta: intentional_burn.meta,
                     tx_type: Some(TxType::BURN),
                     genesis_info: None,
                     group_token_meta: None,
                     is_invalid: false,
                     intentional_burn_amount: Some(intentional_burn.amount),
                     actual_burn_amount: 0,
                     burns_mint_batons: false,
                     burn_error: None,
                     has_colored_out_of_range: false,
                     failed_colorings: vec![],
                 });
             }
         }
 
         let bare_burns = self.calc_bare_burns(&tx, &entries);
 
         // Add failed colorings to the matching entry, or add a new one
         for failed_coloring in tx.failed_colorings {
             if let Some(entry) = entries
                 .iter_mut()
                 .find(|entry| entry.meta == failed_coloring.parsed.meta)
             {
                 entry.failed_colorings.push(failed_coloring);
                 continue;
             }
             entries.push(TokenTxEntry {
                 meta: failed_coloring.parsed.meta,
                 tx_type: Some(failed_coloring.parsed.tx_type.tx_type()),
                 genesis_info: match &failed_coloring.parsed.tx_type {
                     ParsedTxType::Genesis(genesis) => {
                         Some(genesis.info.clone())
                     }
                     _ => None,
                 },
                 group_token_meta: None,
                 is_invalid: true,
                 intentional_burn_amount: None,
                 actual_burn_amount: 0,
                 burns_mint_batons: false,
                 burn_error: None,
                 has_colored_out_of_range: false,
                 failed_colorings: vec![failed_coloring],
             });
         }
 
         // Update entries for bare burn or add them
         for (burn_meta, bare_burn) in bare_burns {
             if let Some(entry) =
                 entries.iter_mut().find(|entry| entry.meta == *burn_meta)
             {
                 if bare_burn.burns_mint_batons {
                     entry.is_invalid = true;
                 }
                 entry.actual_burn_amount = bare_burn.burn_amount;
                 entry.burns_mint_batons = bare_burn.burns_mint_batons;
                 entry.group_token_meta = bare_burn.group_token_meta;
                 continue;
             }
             entries.push(TokenTxEntry {
                 meta: *burn_meta,
                 tx_type: None,
                 genesis_info: None,
                 group_token_meta: bare_burn.group_token_meta,
                 is_invalid: bare_burn.is_invalid,
                 intentional_burn_amount: None,
                 actual_burn_amount: bare_burn.burn_amount,
                 burns_mint_batons: bare_burn.burns_mint_batons,
                 burn_error: None,
                 has_colored_out_of_range: false,
                 failed_colorings: vec![],
             });
         }
 
         let outputs = tx
             .outputs
             .into_iter()
             .map(|output| -> Option<_> {
                 let entry = &entries[output.as_ref()?.token_idx];
                 if entry.is_invalid {
                     return None;
                 }
                 output
             })
             .collect::<Vec<_>>();
         TokenTx {
             entries,
             outputs,
             failed_parsings: tx.failed_parsings,
         }
     }
 
     fn verify_section(
         &self,
         tx: &ColoredTx,
         section: &ColoredTxSection,
     ) -> TokenTxEntry {
         let input_sum = self.calc_input_sum(&section.meta);
 
         // Template entry with either zero defaults or copied over from the
         // colored section.
         let entry = TokenTxEntry {
             meta: section.meta,
             tx_type: Some(section.tx_type),
             genesis_info: section.genesis_info.clone(),
             group_token_meta: self.inherited_group_token_meta(&section.meta),
             is_invalid: false,
             intentional_burn_amount: self
                 .intentional_burn_amount(tx, &section.meta),
             actual_burn_amount: 0,
             burns_mint_batons: false,
             burn_error: None,
             has_colored_out_of_range: section.has_colored_out_of_range,
             failed_colorings: vec![],
         };
 
         // ALP only allows up to 2**15 inputs
         if section.meta.token_type.is_alp()
             && self.spent_tokens.len() > MAX_TX_INPUTS
         {
             return TokenTxEntry {
                 is_invalid: true,
                 actual_burn_amount: input_sum,
                 burns_mint_batons: self.has_mint_baton(&section.meta),
                 burn_error: Some(TooManyTxInputs(self.spent_tokens.len())),
                 ..entry
             };
         }
 
         match section.tx_type {
             // NFT1 CHILD GENESIS txs must have an NFT1 GROUP token at
             // the 1st input
             TxType::GENESIS
                 if section.meta.token_type
                     == TokenType::Slp(SlpTokenType::Nft1Child) =>
             {
-                match self.spent_tokens.get(0) {
+                match self.spent_tokens.first() {
                     Some(Some(spent_token))
                         if spent_token.token.meta.token_type
                             == TokenType::Slp(SlpTokenType::Nft1Group)
                             && spent_token.token.variant.amount() > 0 =>
                     {
                         TokenTxEntry {
                             group_token_meta: Some(spent_token.token.meta),
                             ..entry
                         }
                     }
                     _ => TokenTxEntry {
                         is_invalid: true,
                         burn_error: Some(MissingNft1Group),
                         ..entry
                     },
                 }
             }
 
             // All other GENESIS txs are self-evident
             TxType::GENESIS => entry,
 
             // SLP V2 Mint Vault must have a given Script as input
             TxType::MINT if section.is_mint_vault_mint() => {
                 if self.has_mint_vault() {
                     return TokenTxEntry {
                         actual_burn_amount: input_sum,
                         ..entry
                     };
                 }
                 TokenTxEntry {
                     is_invalid: true,
                     actual_burn_amount: input_sum,
                     burn_error: Some(MissingMintVault),
                     ..entry
                 }
             }
 
             // All other MINTs must have a matching mint baton
             TxType::MINT => {
                 if self.has_mint_baton(&section.meta) {
                     return TokenTxEntry {
                         actual_burn_amount: input_sum,
                         ..entry
                     };
                 }
                 TokenTxEntry {
                     is_invalid: true,
                     actual_burn_amount: input_sum,
                     burn_error: Some(MissingMintBaton),
                     ..entry
                 }
             }
 
             // SEND cannot spend more than came into the tx as inputs
             TxType::SEND if input_sum < section.required_input_sum => {
                 TokenTxEntry {
                     is_invalid: true,
                     actual_burn_amount: input_sum,
                     burns_mint_batons: self.has_mint_baton(&section.meta),
                     burn_error: Some(InsufficientInputSum {
                         required: section.required_input_sum,
                         actual: input_sum,
                     }),
                     ..entry
                 }
             }
 
             // Valid SEND
             TxType::SEND => {
                 let output_sum = self.calc_output_sum(tx, &section.meta);
                 let actual_burn_amount = input_sum - output_sum;
                 TokenTxEntry {
                     actual_burn_amount,
                     burns_mint_batons: self.has_mint_baton(&section.meta),
                     ..entry
                 }
             }
 
             // UNKNOWN txs are self-evident
             TxType::UNKNOWN => entry,
 
             TxType::BURN => unreachable!(
                 "BURNs are only in intentional_burns, not in sections"
             ),
         }
     }
 
     fn has_mint_baton(&self, meta: &TokenMeta) -> bool {
         self.spent_tokens.iter().flatten().any(|spent_token| {
             &spent_token.token.meta == meta
                 && spent_token.token.variant.is_mint_baton()
         })
     }
 
     fn has_mint_vault(&self) -> bool {
         if let Some(override_has_mint_vault) = self.override_has_mint_vault {
             return override_has_mint_vault;
         }
         let Some(spent_scripts) = self.spent_scripts else {
             panic!(
                 "VerifyContext used incorrectly; spent_scripts must be \
                  present for SLP V2 Mint Vault token types"
             );
         };
         let Some(genesis_info) = self.genesis_info else {
             return false;
         };
         let Some(scripthash) = &genesis_info.mint_vault_scripthash else {
             return false;
         };
         let script = Script::p2sh(scripthash);
         spent_scripts
             .iter()
             .any(|spent_script| spent_script == &script)
     }
 
     fn calc_input_sum(&self, meta: &TokenMeta) -> u128 {
         self.spent_tokens
             .iter()
             .flatten()
             .filter(|token| &token.token.meta == meta)
             .map(|token| token.token.variant.amount() as u128)
             .sum()
     }
 
     fn calc_output_sum(&self, tx: &ColoredTx, meta: &TokenMeta) -> u128 {
         tx.outputs
             .iter()
             .flatten()
             .filter(|token| &tx.sections[token.token_idx].meta == meta)
             .map(|token| token.variant.amount() as u128)
             .sum()
     }
 
     fn inherited_group_token_meta(
         &self,
         meta: &TokenMeta,
     ) -> Option<TokenMeta> {
         self.spent_tokens
             .iter()
             .flatten()
             .find(|token| &token.token.meta == meta)
             .and_then(|token| token.group_token_meta)
     }
 
     fn intentional_burn_amount(
         &self,
         tx: &ColoredTx,
         meta: &TokenMeta,
     ) -> Option<u64> {
         tx.intentional_burns
             .iter()
             .find(|burn| &burn.meta == meta)
             .map(|burn| burn.amount)
     }
 
     // Bare burns: spent tokens without a corresponding section
     fn calc_bare_burns(
         &self,
         tx: &ColoredTx,
         entries: &[TokenTxEntry],
     ) -> BTreeMap<&TokenMeta, BareBurn> {
         let mut bare_burns = BTreeMap::new();
         for (input_idx, input) in self.spent_tokens.iter().enumerate() {
             let Some(input) = input else { continue };
 
            // Input has a corresponding section, so it's not a bare burn
             if tx
                 .sections
                 .iter()
                 .any(|section| section.meta == input.token.meta)
             {
                 continue;
             }
 
             let bare_burn =
                 bare_burns.entry(&input.token.meta).or_insert(BareBurn {
                     burn_amount: 0,
                     burns_mint_batons: false,
                     group_token_meta: input.group_token_meta,
                     is_invalid: false,
                 });
 
            // We don't consider NFT1 GROUP inputs for NFT1 CHILD GENESIS a
            // burn. At this stage, the validation that the first input is an
            // NFT1 GROUP token is already done, otherwise is_invalid would
            // be true. We still create a bare burn entry so that we get a
            // TokenTxEntry, but leave is_invalid at false and don't
            // increment the burned amount.
             if input_idx == 0 {
                 if let Some(first_entry) = entries.first() {
                     if first_entry.meta.token_type
                         == TokenType::Slp(SlpTokenType::Nft1Child)
                         && first_entry.tx_type == Some(TxType::GENESIS)
                         && !first_entry.is_invalid
                     {
                         continue;
                     }
                 }
             }
 
             // All other bare burns are invalid
             bare_burn.is_invalid = true;
             match input.token.variant {
                 TokenVariant::Amount(amount) => {
                     bare_burn.burn_amount += u128::from(amount)
                 }
                 TokenVariant::MintBaton => bare_burn.burns_mint_batons = true,
                 TokenVariant::Unknown(_) => {}
             }
         }
         bare_burns
     }
 }
diff --git a/chronik/chronik-db/src/io/spent_by.rs b/chronik/chronik-db/src/io/spent_by.rs
index 7d6fb01b3..619e19bfa 100644
--- a/chronik/chronik-db/src/io/spent_by.rs
+++ b/chronik/chronik-db/src/io/spent_by.rs
@@ -1,681 +1,678 @@
 // Copyright (c) 2023 The Bitcoin developers
 // Distributed under the MIT software license, see the accompanying
 // file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 use std::{
     collections::{hash_map::Entry, HashMap},
     time::Instant,
 };
 
 use abc_rust_error::Result;
 use rocksdb::{ColumnFamilyDescriptor, Options};
 use serde::{Deserialize, Serialize};
 use thiserror::Error;
 
 use crate::{
     db::{Db, WriteBatch, CF, CF_SPENT_BY},
     index_tx::IndexTx,
     io::{merge::catch_merge_errors, TxNum},
     ser::{db_deserialize, db_deserialize_vec, db_serialize, db_serialize_vec},
 };
 
 /// Indicates an output has been spent by an input in a tx.
 /// This is an entry in the list of spent outputs of a tx.
 #[derive(
     Clone,
     Debug,
     Default,
     Deserialize,
     Eq,
     Hash,
     Ord,
     PartialEq,
     PartialOrd,
     Serialize,
 )]
 pub struct SpentByEntry {
     /// Which output has been spent.
     pub out_idx: u32,
     /// Which tx spent the output.
     pub tx_num: TxNum,
     /// Which input of the spending tx spent the output.
     pub input_idx: u32,
 }
 
 struct SpentByColumn<'a> {
     db: &'a Db,
     cf: &'a CF,
 }
 
 /// Write to the DB which outputs of a tx have been spent by which tx (and
 /// input).
 ///
 /// For each tx that has any output spent, there will be a list of entries in
 /// the DB. Each entry tells us which output has been spent, which tx_num spent
 /// it, and which input of that tx.
 ///
 /// Note: While TxWriter writes keys using 8-byte big-endian numbers, this
 /// writes them using [`db_serialize`], because unlike TxWriter, we don't
 /// rely on any ordering. Since [`db_serialize`] is more compact, this saves
 /// some space.
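 ///
 /// For example (sketch), if output 2 of the tx with tx_num 17 is spent by
 /// input 0 of the tx with tx_num 42, the value stored under key 17 would
 /// contain `SpentByEntry { out_idx: 2, tx_num: 42, input_idx: 0 }`.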
 #[derive(Debug)]
 pub struct SpentByWriter<'a> {
     col: SpentByColumn<'a>,
 }
 
 /// Read from the DB which outputs of a tx have been spent by which tx (and
 /// input).
 #[derive(Debug)]
 pub struct SpentByReader<'a> {
     col: SpentByColumn<'a>,
 }
 
 /// In-memory data for spent-by data.
 #[derive(Debug, Default)]
 pub struct SpentByMemData {
     /// Stats about cache hits, num requests etc.
     pub stats: SpentByStats,
 }
 
 /// Stats about cache hits, num requests etc.
 #[derive(Clone, Debug, Default)]
 pub struct SpentByStats {
     /// Total number of txs updated.
     pub n_total: usize,
     /// Time [s] for insert/delete.
     pub t_total: f64,
     /// Time [s] for fetching txs.
     pub t_fetch: f64,
 }
 
 /// Error indicating that something went wrong with writing spent-by data.
 #[derive(Debug, Error, PartialEq, Eq)]
 pub enum SpentByError {
     /// Tried adding a spent-by entry, but it's already marked as spent by
     /// another entry.
     #[error(
         "Inconsistent DB: Duplicate spend by entry for tx_num = {tx_num}: \
          {existing:?} already exists, but tried to add {new:?}"
     )]
     DuplicateSpentByEntry {
         /// An output of this tx_num has been spent
         tx_num: TxNum,
         /// Entry already present in the DB.
         existing: SpentByEntry,
         /// Entry we tried to add.
         new: SpentByEntry,
     },
 
     /// Tried removing a spent-by entry, but it doesn't match what we got from
     /// the disconnected block.
     #[error(
         "Inconsistent DB: Mismatched spend by entry for tx_num = {tx_num}: \
          Expected {expected:?} to be present, but got {actual:?}"
     )]
     MismatchedSpentByEntry {
         /// Tried removing a spent-by entry of an output of this tx_num.
         tx_num: TxNum,
         /// Entry we expected based on the disconnected block.
         expected: SpentByEntry,
         /// Entry actually found in the DB.
         actual: SpentByEntry,
     },
 
     /// Tried removing a spent-by entry, but it doesn't exist.
     #[error(
         "Inconsistent DB: Missing spend by entry for tx_num = {tx_num}: \
          Expected {expected:?} to be present, but none found"
     )]
     MissingSpentByEntry {
         /// Tried removing a spent-by entry of an output of this tx_num.
         tx_num: TxNum,
         /// Entry we expected to be present based on the disconnected block.
         expected: SpentByEntry,
     },
 }
 
 use self::SpentByError::*;
 
 fn ser_tx_num(tx_num: TxNum) -> Result<Vec<u8>> {
     db_serialize(&tx_num)
 }
 
 fn deser_tx_num(bytes: &[u8]) -> Result<TxNum> {
     db_deserialize(bytes)
 }
 
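 // The three functions below form the rocksdb merge operator installed in
 // `add_cfs`: `init_merge_spent_by` loads the existing entry list (or starts
 // empty), `apply_merge_spent_by` folds each operand into it, and
 // `ser_merge_spent_by` serializes the merged list back to bytes.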
 fn init_merge_spent_by(
     _key: &[u8],
     existing_value: Option<&[u8]>,
     _operands: &rocksdb::MergeOperands,
 ) -> Result<Vec<SpentByEntry>> {
     match existing_value {
         Some(bytes) => db_deserialize_vec::<SpentByEntry>(bytes),
         None => Ok(vec![]),
     }
 }
 
 fn apply_merge_spent_by(
     key: &[u8],
     entries: &mut Vec<SpentByEntry>,
     operand: &[u8],
 ) -> Result<()> {
     let extra_entries = db_deserialize_vec::<SpentByEntry>(operand)?;
     entries.reserve(extra_entries.len());
     for spent_by in extra_entries {
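         // Entries are kept sorted by out_idx, so a binary search both
         // detects double-spends and yields the insertion position.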
         let search_idx = entries
             .binary_search_by_key(&spent_by.out_idx, |entry| entry.out_idx);
         match search_idx {
             Ok(idx) => {
                 let input_tx_num = deser_tx_num(key)?;
                 // Output already spent by another tx -> corrupted DB?
                 return Err(DuplicateSpentByEntry {
                     tx_num: input_tx_num,
                     existing: entries[idx].clone(),
                     new: spent_by,
                 }
                 .into());
             }
             Err(insert_idx) => {
                 // No entry found -> insert it
                 entries.insert(insert_idx, spent_by);
             }
         }
     }
     Ok(())
 }
 
 fn ser_merge_spent_by(
     _key: &[u8],
     entries: Vec<SpentByEntry>,
 ) -> Result<Vec<u8>> {
     db_serialize_vec::<SpentByEntry>(entries)
 }
 
 impl<'a> SpentByColumn<'a> {
     fn new(db: &'a Db) -> Result<Self> {
         let cf = db.cf(CF_SPENT_BY)?;
         Ok(SpentByColumn { db, cf })
     }
 }
 
 impl<'a> SpentByWriter<'a> {
     /// Create a new [`SpentByWriter`].
     pub fn new(db: &'a Db) -> Result<Self> {
         let col = SpentByColumn::new(db)?;
         Ok(SpentByWriter { col })
     }
 
     /// Add spent-by entries for the outputs spent by the given txs.
     /// For each tx output spent in `txs`, add which tx spent it.
     pub fn insert(
         &self,
         batch: &mut WriteBatch,
         txs: &[IndexTx<'_>],
         mem_data: &mut SpentByMemData,
     ) -> Result<()> {
         let stats = &mut mem_data.stats;
         let t_start = Instant::now();
         stats.n_total += txs.len();
         let mut spent_by_map = HashMap::<TxNum, Vec<SpentByEntry>>::new();
         for tx in txs {
             if tx.is_coinbase {
                 // a coinbase doesn't spend anything
                 continue;
             }
             for (input_idx, (input, &input_tx_num)) in
                 tx.tx.inputs.iter().zip(tx.input_nums.iter()).enumerate()
             {
                 let spent_by = SpentByEntry {
                     out_idx: input.prev_out.out_idx,
                     tx_num: tx.tx_num,
                     input_idx: input_idx as u32,
                 };
-                spent_by_map
-                    .entry(input_tx_num)
-                    .or_insert(vec![])
-                    .push(spent_by);
+                spent_by_map.entry(input_tx_num).or_default().push(spent_by);
             }
         }
         for (tx_num, entries) in spent_by_map {
             batch.merge_cf(
                 self.col.cf,
                 ser_tx_num(tx_num)?,
                 db_serialize_vec::<SpentByEntry>(entries)?,
             );
         }
         stats.t_total += t_start.elapsed().as_secs_f64();
         Ok(())
     }
 
     /// Remove spent-by entries for the outputs spent by the given txs.
     /// For each tx output spent in `txs`, remove which tx spent it.
     pub fn delete(
         &self,
         batch: &mut WriteBatch,
         txs: &[IndexTx<'_>],
         mem_data: &mut SpentByMemData,
     ) -> Result<()> {
         let stats = &mut mem_data.stats;
         let t_start = Instant::now();
         stats.n_total += txs.len();
         let mut spent_by_map = HashMap::<TxNum, Vec<SpentByEntry>>::new();
         for tx in txs {
             if tx.is_coinbase {
                 // a coinbase doesn't spend anything
                 continue;
             }
             for (input_idx, (input, &input_tx_num)) in
                 tx.tx.inputs.iter().zip(tx.input_nums.iter()).enumerate()
             {
                 let spent_by = SpentByEntry {
                     out_idx: input.prev_out.out_idx,
                     tx_num: tx.tx_num,
                     input_idx: input_idx as u32,
                 };
                 let t_fetch = Instant::now();
                 let spent_by_entries =
                     self.get_or_fetch(&mut spent_by_map, input_tx_num)?;
                 stats.t_fetch += t_fetch.elapsed().as_secs_f64();
                 let search_idx = spent_by_entries
                     .binary_search_by_key(&spent_by.out_idx, |entry| {
                         entry.out_idx
                     });
                 match search_idx {
                     Ok(idx) => {
                         // Found the spent-by entry -> remove it.
                         if spent_by_entries[idx] != spent_by {
                             // Existing entry doesn't match what's in the DB.
                             return Err(MismatchedSpentByEntry {
                                 tx_num: input_tx_num,
                                 expected: spent_by,
                                 actual: spent_by_entries[idx].clone(),
                             }
                             .into());
                         }
                         spent_by_entries.remove(idx);
                     }
                     Err(_) => {
                         // Spent-by entry not found, but should be there.
                         return Err(MissingSpentByEntry {
                             tx_num: input_tx_num,
                             expected: spent_by,
                         }
                         .into());
                     }
                 }
             }
         }
         for (tx_num, entries) in spent_by_map {
             let ser_num = ser_tx_num(tx_num)?;
             if entries.is_empty() {
                 batch.delete_cf(self.col.cf, ser_num);
             } else {
                 batch.put_cf(self.col.cf, ser_num, db_serialize_vec(entries)?);
             }
         }
         stats.t_total += t_start.elapsed().as_secs_f64();
         Ok(())
     }
 
     fn get_or_fetch<'b>(
         &self,
         spent_by_map: &'b mut HashMap<TxNum, Vec<SpentByEntry>>,
         tx_num: TxNum,
     ) -> Result<&'b mut Vec<SpentByEntry>> {
         match spent_by_map.entry(tx_num) {
             Entry::Occupied(entry) => Ok(entry.into_mut()),
             Entry::Vacant(entry) => {
                 let db_entries =
                     match self.col.db.get(self.col.cf, ser_tx_num(tx_num)?)? {
                         Some(data) => {
                             db_deserialize_vec::<SpentByEntry>(&data)?
                         }
                         None => vec![],
                     };
                 Ok(entry.insert(db_entries))
             }
         }
     }
 
     pub(crate) fn add_cfs(columns: &mut Vec<ColumnFamilyDescriptor>) {
         let mut options = Options::default();
         options.set_merge_operator(
             "spent_by::merge_op",
             catch_merge_errors(
                 init_merge_spent_by,
                 apply_merge_spent_by,
                 ser_merge_spent_by,
             ),
             |_, _, _| None,
         );
         columns.push(ColumnFamilyDescriptor::new(CF_SPENT_BY, options));
     }
 }
 
 impl<'a> SpentByReader<'a> {
     /// Create a new [`SpentByReader`].
     pub fn new(db: &'a Db) -> Result<Self> {
         let col = SpentByColumn::new(db)?;
         Ok(SpentByReader { col })
     }
 
     /// Query the spent-by entries by [`TxNum`].
     pub fn by_tx_num(
         &self,
         tx_num: TxNum,
     ) -> Result<Option<Vec<SpentByEntry>>> {
         match self.col.db.get(self.col.cf, ser_tx_num(tx_num)?)? {
             Some(data) => Ok(Some(db_deserialize_vec::<SpentByEntry>(&data)?)),
             None => Ok(None),
         }
     }
 }
 
 impl std::fmt::Debug for SpentByColumn<'_> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         f.debug_struct("SpentByColumn")
             .field("db", &self.db)
             .field("cf", &"..")
             .finish()
     }
 }
 
 #[cfg(test)]
 mod tests {
     use std::cell::RefCell;
 
     use abc_rust_error::Result;
     use bitcoinsuite_core::tx::Tx;
 
     use crate::{
         db::{Db, WriteBatch},
         index_tx::prepare_indexed_txs,
         io::{
             merge::{check_for_errors, MERGE_ERROR_LOCK},
             BlockTxs, SpentByEntry, SpentByError, SpentByMemData,
             SpentByReader, SpentByWriter, TxEntry, TxWriter, TxsMemData,
         },
         test::make_inputs_tx,
     };
 
     #[test]
     fn test_spent_by() -> Result<()> {
         let _guard = MERGE_ERROR_LOCK.lock().unwrap();
         abc_rust_error::install();
         let tempdir = tempdir::TempDir::new("chronik-db--spent_by")?;
         let mut cfs = Vec::new();
         TxWriter::add_cfs(&mut cfs);
         SpentByWriter::add_cfs(&mut cfs);
         let db = Db::open_with_cfs(tempdir.path(), cfs)?;
         let tx_writer = TxWriter::new(&db)?;
         let spent_by_writer = SpentByWriter::new(&db)?;
         let spent_by_reader = SpentByReader::new(&db)?;
         let mem_data = RefCell::new(SpentByMemData::default());
         let txs_mem_data = RefCell::new(TxsMemData::default());
 
         let block_height = RefCell::new(-1);
         let txs_batch = |txs: &[Tx]| BlockTxs {
             txs: txs
                 .iter()
                 .map(|tx| TxEntry {
                     txid: tx.txid(),
                     ..Default::default()
                 })
                 .collect(),
             block_height: *block_height.borrow(),
         };
         let connect_block = |txs: &[Tx]| -> Result<()> {
             let mut batch = WriteBatch::default();
             *block_height.borrow_mut() += 1;
             let first_tx_num = tx_writer.insert(
                 &mut batch,
                 &txs_batch(txs),
                 &mut txs_mem_data.borrow_mut(),
             )?;
             let index_txs = prepare_indexed_txs(&db, first_tx_num, txs)?;
             spent_by_writer.insert(
                 &mut batch,
                 &index_txs,
                 &mut mem_data.borrow_mut(),
             )?;
             db.write_batch(batch)?;
             for tx in &index_txs {
                 for &input_tx_num in &tx.input_nums {
                     spent_by_reader.by_tx_num(input_tx_num)?;
                     check_for_errors()?;
                 }
             }
             Ok(())
         };
         let disconnect_block = |txs: &[Tx]| -> Result<()> {
             let mut batch = WriteBatch::default();
             let first_tx_num = tx_writer.delete(
                 &mut batch,
                 &txs_batch(txs),
                 &mut txs_mem_data.borrow_mut(),
             )?;
             let index_txs = prepare_indexed_txs(&db, first_tx_num, txs)?;
             spent_by_writer.delete(
                 &mut batch,
                 &index_txs,
                 &mut mem_data.borrow_mut(),
             )?;
             db.write_batch(batch)?;
             *block_height.borrow_mut() -= 1;
             Ok(())
         };
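         // Shorthand: output `out_idx` of the queried tx is spent by input
         // `input_idx` of tx `tx_num`.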
         macro_rules! spent_by {
             (out_idx=$out_idx:literal -> tx_num=$tx_num:literal,
                                          input_idx=$input_idx:literal) => {
                 SpentByEntry {
                     out_idx: $out_idx,
                     tx_num: $tx_num,
                     input_idx: $input_idx,
                 }
             };
         }
 
         let block0 =
             vec![make_inputs_tx(0, [(0x00, u32::MAX, -1)], [-1, -1, -1, -1])];
         connect_block(&block0)?;
 
         let block1 = vec![
             make_inputs_tx(1, [(0x00, u32::MAX, -1)], [-1, -1]),
             // spend 3rd output of tx_num=0
             make_inputs_tx(2, [(0, 3, -1)], [-1, -1, -1]),
         ];
         connect_block(&block1)?;
         assert_eq!(
             spent_by_reader.by_tx_num(0)?,
             Some(vec![spent_by!(out_idx=3 -> tx_num=2, input_idx=0)]),
         );
 
         // Remove block1
         disconnect_block(&block1)?;
         assert_eq!(spent_by_reader.by_tx_num(0)?, None);
 
         // Add block1 again
         connect_block(&block1)?;
 
         let block2 = vec![
             make_inputs_tx(3, [(0x00, u32::MAX, -1)], [-1]),
             // spend 2nd output of tx_num=0, and 0th output of tx_num=1
             make_inputs_tx(4, [(0, 2, -1), (1, 0, -1)], [-1, -1, -1]),
             // spend 1st output of tx_num=1, and 1st output of tx_num=0
             make_inputs_tx(5, [(1, 1, -1), (0, 1, -1)], [-1, -1]),
         ];
         connect_block(&block2)?;
         assert_eq!(
             spent_by_reader.by_tx_num(0)?,
             Some(vec![
                 spent_by!(out_idx=1 -> tx_num=5, input_idx=1),
                 spent_by!(out_idx=2 -> tx_num=4, input_idx=0),
                 spent_by!(out_idx=3 -> tx_num=2, input_idx=0),
             ]),
         );
         assert_eq!(
             spent_by_reader.by_tx_num(1)?,
             Some(vec![
                 spent_by!(out_idx=0 -> tx_num=4, input_idx=1),
                 spent_by!(out_idx=1 -> tx_num=5, input_idx=0),
             ]),
         );
 
         // More complex block
         let block3 = vec![
             make_inputs_tx(6, [(0x00, u32::MAX, -1)], [-1]),
             make_inputs_tx(7, [(4, 0, -1), (2, 0, -1)], [-1, -1]),
             make_inputs_tx(8, [(9, 0, -1), (5, 1, -1)], [-1, -1]),
             make_inputs_tx(9, [(7, 1, -1), (8, 1, -1)], [-1]),
         ];
         connect_block(&block3)?;
         assert_eq!(
             spent_by_reader.by_tx_num(2)?,
             Some(vec![spent_by!(out_idx=0 -> tx_num=7, input_idx=1)]),
         );
         assert_eq!(
             spent_by_reader.by_tx_num(4)?,
             Some(vec![spent_by!(out_idx=0 -> tx_num=7, input_idx=0)]),
         );
         assert_eq!(
             spent_by_reader.by_tx_num(5)?,
             Some(vec![spent_by!(out_idx=1 -> tx_num=8, input_idx=1)]),
         );
         assert_eq!(spent_by_reader.by_tx_num(6)?, None);
         assert_eq!(
             spent_by_reader.by_tx_num(7)?,
             Some(vec![spent_by!(out_idx=1 -> tx_num=9, input_idx=0)]),
         );
         assert_eq!(
             spent_by_reader.by_tx_num(8)?,
             Some(vec![spent_by!(out_idx=1 -> tx_num=9, input_idx=1)]),
         );
         assert_eq!(
             spent_by_reader.by_tx_num(9)?,
             Some(vec![spent_by!(out_idx=0 -> tx_num=8, input_idx=0)]),
         );
 
         disconnect_block(&block3)?;
         assert_eq!(
             spent_by_reader.by_tx_num(0)?,
             Some(vec![
                 spent_by!(out_idx=1 -> tx_num=5, input_idx=1),
                 spent_by!(out_idx=2 -> tx_num=4, input_idx=0),
                 spent_by!(out_idx=3 -> tx_num=2, input_idx=0),
             ]),
         );
         assert_eq!(
             spent_by_reader.by_tx_num(1)?,
             Some(vec![
                 spent_by!(out_idx=0 -> tx_num=4, input_idx=1),
                 spent_by!(out_idx=1 -> tx_num=5, input_idx=0),
             ]),
         );
         assert_eq!(spent_by_reader.by_tx_num(2)?, None);
         assert_eq!(spent_by_reader.by_tx_num(4)?, None);
         assert_eq!(spent_by_reader.by_tx_num(5)?, None);
         assert_eq!(spent_by_reader.by_tx_num(6)?, None);
         assert_eq!(spent_by_reader.by_tx_num(7)?, None);
         assert_eq!(spent_by_reader.by_tx_num(8)?, None);
         assert_eq!(spent_by_reader.by_tx_num(9)?, None);
 
         // Failed connect: duplicate entry
         let block_duplicate_spend = vec![
             make_inputs_tx(10, [(0x00, u32::MAX, -1)], []),
             make_inputs_tx(11, [(0, 1, -1)], []),
         ];
         assert_eq!(
             connect_block(&block_duplicate_spend)
                 .unwrap_err()
                 .downcast::<SpentByError>()?,
             SpentByError::DuplicateSpentByEntry {
                 tx_num: 0,
                 existing: spent_by!(out_idx=1 -> tx_num=5, input_idx=1),
                 new: spent_by!(out_idx=1 -> tx_num=7, input_idx=0),
             },
         );
 
         // Ensure failed connect didn't have any side effects on spent-by data
         assert_eq!(
             spent_by_reader.by_tx_num(0)?,
             Some(vec![
                 spent_by!(out_idx=1 -> tx_num=5, input_idx=1),
                 spent_by!(out_idx=2 -> tx_num=4, input_idx=0),
                 spent_by!(out_idx=3 -> tx_num=2, input_idx=0),
             ]),
         );
         assert_eq!(
             spent_by_reader.by_tx_num(1)?,
             Some(vec![
                 spent_by!(out_idx=0 -> tx_num=4, input_idx=1),
                 spent_by!(out_idx=1 -> tx_num=5, input_idx=0),
             ]),
         );
 
         // Undo side effects introduced by failed connect
         disconnect_block(&[
             make_inputs_tx(10, [(0x00, u32::MAX, -1)], []),
             // Note the missing input values
             make_inputs_tx(11, [], []),
         ])?;
 
         // Failed disconnect: mismatched entry
         let block_mismatched_spend = vec![
             make_inputs_tx(3, [(0x00, u32::MAX, -1)], []),
             make_inputs_tx(4, [(0, 1, -1)], []),
         ];
         assert_eq!(
             disconnect_block(&block_mismatched_spend)
                 .unwrap_err()
                 .downcast::<SpentByError>()?,
             SpentByError::MismatchedSpentByEntry {
                 tx_num: 0,
                 expected: spent_by!(out_idx=1 -> tx_num=4, input_idx=0),
                 actual: spent_by!(out_idx=1 -> tx_num=5, input_idx=1),
             },
         );
 
         // Failed disconnect: missing entry
         let block_missing_spend = vec![
             make_inputs_tx(3, [(0x00, u32::MAX, -1)], []),
             make_inputs_tx(4, [(3, 1, -1)], []),
         ];
         assert_eq!(
             disconnect_block(&block_missing_spend)
                 .unwrap_err()
                 .downcast::<SpentByError>()?,
             SpentByError::MissingSpentByEntry {
                 tx_num: 3,
                 expected: spent_by!(out_idx=1 -> tx_num=4, input_idx=0),
             },
         );
 
         // Disconnect blocks
         disconnect_block(&block2)?;
         assert_eq!(
             spent_by_reader.by_tx_num(0)?,
             Some(vec![spent_by!(out_idx=3 -> tx_num=2, input_idx=0)]),
         );
         assert_eq!(spent_by_reader.by_tx_num(1)?, None);
         assert_eq!(spent_by_reader.by_tx_num(2)?, None);
         assert_eq!(spent_by_reader.by_tx_num(3)?, None);
         assert_eq!(spent_by_reader.by_tx_num(4)?, None);
         assert_eq!(spent_by_reader.by_tx_num(5)?, None);
 
         disconnect_block(&block1)?;
         assert_eq!(spent_by_reader.by_tx_num(0)?, None);
         assert_eq!(spent_by_reader.by_tx_num(1)?, None);
         assert_eq!(spent_by_reader.by_tx_num(2)?, None);
         assert_eq!(spent_by_reader.by_tx_num(3)?, None);
 
         disconnect_block(&block0)?;
         assert_eq!(spent_by_reader.by_tx_num(0)?, None);
         assert_eq!(spent_by_reader.by_tx_num(1)?, None);
 
         drop(db);
         rocksdb::DB::destroy(&rocksdb::Options::default(), tempdir.path())?;
         let _ = check_for_errors();
 
         Ok(())
     }
 }
diff --git a/chronik/chronik-db/src/io/token/batch.rs b/chronik/chronik-db/src/io/token/batch.rs
index e914c19bb..b6910fbed 100644
--- a/chronik/chronik-db/src/io/token/batch.rs
+++ b/chronik/chronik-db/src/io/token/batch.rs
@@ -1,463 +1,463 @@
 // Copyright (c) 2024 The Bitcoin developers
 // Distributed under the MIT software license, see the accompanying
 // file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 use std::collections::{BTreeMap, HashMap, HashSet};
 
 use abc_rust_error::Result;
 use bimap::BiMap;
 use bitcoinsuite_core::{script::Script, tx::OutPoint};
 use bitcoinsuite_slp::{
     color::ColoredTx,
     structs::{GenesisInfo, Token, TokenMeta, TokenVariant, TxType},
     token_tx::TokenTx,
     token_type::TokenType,
     verify::{SpentToken, VerifyContext},
 };
 use itertools::{Either, Itertools};
 use thiserror::Error;
 use topo_sort::TopoSort;
 
 use crate::{
     index_tx::IndexTx,
     io::{
         token::{BatchError::*, DbToken, DbTokenTx, FLAGS_HAS_MINT_VAULT},
         TxNum,
     },
 };
 
 /// Tx that has token data encoded in it and is ready for token validation
 #[derive(Debug)]
 pub struct PreparedTx<'tx> {
     /// Tx with index data (tx_num etc.)
     pub tx: &'tx IndexTx<'tx>,
     /// Parsed & colored tx. Note that in this context we only store txs with a
     /// non-empty list of sections.
     pub colored: ColoredTx,
 }
 
 /// Struct bundling all the data necessary to process a batch (i.e. a block) of
 /// token txs.
 #[derive(Debug)]
 pub struct BatchProcessor<'tx> {
     /// Tx that have any token info attached
     pub prepared_txs: HashMap<TxNum, PreparedTx<'tx>>,
     /// Non-token txs may still have token inputs
     pub non_token_txs: Vec<&'tx IndexTx<'tx>>,
     /// Whether the batch has any GENESIS txs; if the token index is empty and
     /// we have no GENESIS, we can safely ignore the batch.
     pub has_any_genesis: bool,
 }
 
 /// DB data required to process a batch of txs
 #[derive(Debug)]
 pub struct BatchDbData {
     /// Token tx data coming from the DB
     pub token_txs: BTreeMap<TxNum, DbTokenTx>,
     /// token_nums assigned for each [`TokenMeta`]. This is a bi-map so we can
     /// look up in either direction.
     pub token_metas: BiMap<TxNum, TokenMeta>,
     /// [`GenesisInfo`] from the database; required for SLP V2 Mint Vault txs
     pub genesis_infos: HashMap<TokenMeta, GenesisInfo>,
 }
 
 /// Result of the batch verification before inserting
 #[derive(Debug, Default)]
 pub struct ProcessedTokenTxBatch {
     /// New tokens to be added to the DB
     pub new_tokens: Vec<(TxNum, TokenMeta, GenesisInfo)>,
     /// New DB data for txs to be added to the DB.
     pub db_token_txs: HashMap<TxNum, DbTokenTx>,
     /// Validated token txs in the batch.
     pub valid_txs: HashMap<TxNum, TokenTx>,
     /// Validated spent tokens in the batch; can have entries not in valid_txs
     pub spent_tokens: HashMap<TxNum, Vec<Option<SpentToken>>>,
     /// True if validation of txs was performed, or false if validation was
     /// safely skipped because no tokens are in the DB and the batch contained
     /// no token txs.
     pub did_validation: bool,
 }
 
 /// Error when batch-processing txs, usually implies a critical failure
 #[derive(Debug, Error, PartialEq)]
 pub enum BatchError {
     /// Transactions couldn't be ordered topologically because of a cyclic
     /// dependency. Note: This is cryptographically impossible in practice,
     /// because of the irreversibility of SHA256.
     #[error("Cycle in SLP txs")]
     Cycle,
 
     /// A token_num that should be in the DB was not found
     #[error("Inconsistent BatchDbData: Missing TokenId for token tx num {0}")]
     MissingTokenTxNum(TxNum),
 
     /// The coin of a TxInput that should be present was not found
     #[error("Inconsistent Tx: Missing coin for TxInput {0:?}")]
     MissingTxInputCoin(OutPoint),
 }
 
 impl<'tx> BatchProcessor<'tx> {
     /// Prepare the given indexed txs as token txs.
     pub fn prepare(txs: &'tx [IndexTx<'tx>]) -> Self {
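         // Color each tx: txs with at least one token section become
         // PreparedTxs, everything else is treated as a non-token tx.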
         let (prepared_txs, non_token_txs): (HashMap<_, _>, Vec<_>) = txs
             .iter()
             .partition_map(|tx| match ColoredTx::color_tx(tx.tx) {
                 Some(colored) if !colored.sections.is_empty() => {
                     Either::Left((tx.tx_num, PreparedTx { tx, colored }))
                 }
                 _ => Either::Right(tx),
             });
 
         // The coloring step ensures any GENESIS is in the first section
         let has_any_genesis = prepared_txs
             .values()
             .any(|tx| tx.colored.sections[0].tx_type == TxType::GENESIS);
 
         BatchProcessor {
             prepared_txs,
             non_token_txs,
             has_any_genesis,
         }
     }
 
     /// Collect all [`TokenMeta`]s of the SLP V2 Mint Vault txs in the batch
     pub fn collect_mint_vault_metas(&self) -> HashSet<TokenMeta> {
         self.prepared_txs
             .values()
             .filter(|tx| tx.colored.sections[0].is_mint_vault_mint())
             .map(|tx| tx.colored.sections[0].meta)
             .collect()
     }
 
     /// Verify the entire batch of txs. It updates the DB data with some token
     /// data during validation, but that can be discarded. The result of the
     /// verification is returned as [`ProcessedTokenTxBatch`].
     pub fn verify(
         mut self,
         mut db_data: BatchDbData,
     ) -> Result<ProcessedTokenTxBatch> {
         // Build a DAG of tx nums so we can sort topologically
         let mut topo_sort = TopoSort::with_capacity(self.prepared_txs.len());
         for (&tx_num, batch_tx) in &self.prepared_txs {
             topo_sort.insert_from_slice(tx_num, &batch_tx.tx.input_nums);
         }
 
         // Iterate txs in topological order
         let mut processed_batch = ProcessedTokenTxBatch {
             did_validation: true,
             ..Default::default()
         };
         for tx_num in topo_sort.into_nodes() {
             let tx_num = tx_num.map_err(|_| Cycle)?;
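             // The unwrap is safe: topo_sort contains exactly the keys of
             // prepared_txs.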
             let prepared_tx = self.prepared_txs.remove(&tx_num).unwrap();
             self.verify_token_tx(
                 prepared_tx,
                 &mut db_data,
                 &mut processed_batch,
             )?;
         }
 
         // Non-token txs can still contain token inputs; add those to the
         // index too
         for non_token_tx in &self.non_token_txs {
             self.process_non_token_tx(
                 non_token_tx,
                 &mut db_data,
                 &mut processed_batch,
             )?;
         }
 
         Ok(processed_batch)
     }
 
     fn verify_token_tx(
         &self,
         prepared_tx: PreparedTx<'_>,
         db_data: &mut BatchDbData,
         processed_batch: &mut ProcessedTokenTxBatch,
     ) -> Result<()> {
         let tx_num = prepared_tx.tx.tx_num;
         let spent_tokens = self.tx_token_inputs(
             prepared_tx.tx,
             db_data,
             &processed_batch.valid_txs,
         )?;
 
         let first_section = &prepared_tx.colored.sections[0];
         let is_genesis = first_section.genesis_info.is_some();
         let is_mint_vault_mint = first_section.is_mint_vault_mint();
 
         // MINT txs on SLP V2 tokens need spent scripts and genesis data
         let mut spent_scripts = None;
         let mut genesis_info = None;
         if is_mint_vault_mint {
             spent_scripts = Some(Self::tx_spent_scripts(prepared_tx.tx)?);
             genesis_info = db_data.genesis_infos.get(&first_section.meta);
         }
 
         let context = VerifyContext {
             spent_tokens: &spent_tokens,
             spent_scripts: spent_scripts.as_deref(),
             genesis_info,
             override_has_mint_vault: None,
         };
         let valid_tx = context.verify(prepared_tx.colored);
 
         let has_any_inputs = spent_tokens.iter().any(|input| input.is_some());
         let has_any_outputs =
             valid_tx.outputs.iter().any(|token| token.is_some());
         // Don't store txs that have no actual token inputs or outputs
         if !has_any_outputs && !has_any_inputs && !is_genesis {
             return Ok(());
         }
 
         // Add new tokens from GENESIS txs
-        if let Some(entry) = valid_tx.entries.get(0) {
+        if let Some(entry) = valid_tx.entries.first() {
             // Skip invalid GENESIS txs
             if !entry.is_invalid {
                 if let Some(info) = &entry.genesis_info {
                     db_data.token_metas.insert(tx_num, entry.meta);
                     processed_batch.new_tokens.push((
                         tx_num,
                         entry.meta,
                         info.clone(),
                     ));
                     // Note: Don't update db_data.genesis_infos, because SLP
                     // V2 GENESIS txs require a confirmation before they take
                     // effect.
                 }
             }
         }
 
         let mut token_tx_nums = Vec::new();
         let mut token_metas = Vec::new();
         let mut group_token_metas = BTreeMap::new();
         for entry in &valid_tx.entries {
             let Some(&token_tx_num) =
                 db_data.token_metas.get_by_right(&entry.meta)
             else {
                 continue;
             };
             if !token_metas.iter().contains(&entry.meta) {
                 token_tx_nums.push(token_tx_num);
                 token_metas.push(entry.meta);
             }
             entry.group_token_meta.and_then(|group_meta| {
                 let &tx_num = db_data.token_metas.get_by_right(&group_meta)?;
                 if !token_metas.iter().contains(&group_meta) {
                     token_tx_nums.push(tx_num);
                     token_metas.push(group_meta);
                     group_token_metas.insert(entry.meta, group_meta);
                 }
                 Some(())
             });
         }
 
         let mut flags = 0;
         if is_mint_vault_mint {
-            let first_entry = valid_tx.entries.get(0);
+            let first_entry = valid_tx.entries.first();
             let has_mint_vault =
                 first_entry.map_or(false, |entry| !entry.is_invalid);
             if has_mint_vault {
                 flags |= FLAGS_HAS_MINT_VAULT;
             }
         }
 
         let db_token_tx = DbTokenTx {
             token_tx_nums,
             group_token_indices: group_token_metas
                 .iter()
                 .map(|(meta, group_meta)| {
                     (
                         meta_idx(meta, &token_metas),
                         meta_idx(group_meta, &token_metas),
                     )
                 })
                 .collect(),
             inputs: spent_tokens
                 .iter()
                 .map(|input| {
                     to_db_token(
                         input.as_ref().map(|input| &input.token),
                         &token_metas,
                     )
                 })
                 .collect::<Vec<_>>(),
             outputs: valid_tx
                 .outputs
                 .iter()
                 .map(|output| {
                     to_db_token(
                         output
                             .as_ref()
                             .map(|output| valid_tx.token(output))
                             .as_ref(),
                         &token_metas,
                     )
                 })
                 .collect::<Vec<_>>(),
             flags,
         };
         processed_batch.db_token_txs.insert(tx_num, db_token_tx);
         processed_batch.valid_txs.insert(tx_num, valid_tx);
         processed_batch.spent_tokens.insert(tx_num, spent_tokens);
 
         Ok(())
     }
 
     fn process_non_token_tx(
         &self,
         tx: &IndexTx<'_>,
         db_data: &mut BatchDbData,
         processed_batch: &mut ProcessedTokenTxBatch,
     ) -> Result<()> {
         let mut db_token_tx_nums = Vec::new();
         let mut db_inputs = Vec::with_capacity(tx.input_nums.len());
         let mut db_group_token_indices = BTreeMap::new();
         for (&input_tx_num, input) in tx.input_nums.iter().zip(&tx.tx.inputs) {
             let out_idx = input.prev_out.out_idx as usize;
             let db_token_tx = processed_batch
                 .db_token_txs
                 .get(&input_tx_num)
                 .or_else(|| db_data.token_txs.get(&input_tx_num));
             let Some(db_token_tx) = db_token_tx else {
                 continue;
             };
             let db_token = &db_token_tx.outputs[out_idx];
             let Some(token_tx_num) = db_token_tx.token_tx_num(db_token) else {
                 db_inputs.push(*db_token);
                 continue;
             };
             let token_num_idx = db_token_tx_nums
                 .iter()
                 .position(|&tx_num| tx_num == token_tx_num)
                 .unwrap_or_else(|| {
                     db_token_tx_nums.push(token_tx_num);
                     db_token_tx_nums.len() - 1
                 });
             if let Some(group_token_tx_num) =
                 db_token_tx.group_token_tx_num(db_token)
             {
                 let group_token_num_idx = db_token_tx_nums
                     .iter()
                     .position(|&tx_num| tx_num == group_token_tx_num)
                     .unwrap_or_else(|| {
                         db_token_tx_nums.push(group_token_tx_num);
                         db_token_tx_nums.len() - 1
                     });
                 db_group_token_indices
                     .insert(token_num_idx as u32, group_token_num_idx as u32);
             }
             db_inputs.push(db_token.with_idx(token_num_idx as u32));
         }
 
         // Only store the non-token tx if it spends at least one token input
         if db_inputs.iter().any(|&input| input != DbToken::NoToken) {
             processed_batch.db_token_txs.insert(
                 tx.tx_num,
                 DbTokenTx {
                     token_tx_nums: db_token_tx_nums,
                     group_token_indices: db_group_token_indices,
                     inputs: db_inputs,
                     outputs: vec![DbToken::NoToken; tx.tx.outputs.len()],
                     flags: 0,
                 },
             );
             processed_batch.spent_tokens.insert(
                 tx.tx_num,
                 self.tx_token_inputs(tx, db_data, &processed_batch.valid_txs)?,
             );
         }
         Ok(())
     }
 
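     /// Collect the scripts of the coins spent by `tx`; fails if any input is
     /// missing its coin.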
     fn tx_spent_scripts(tx: &IndexTx<'_>) -> Result<Vec<Script>> {
         let mut spent_scripts = Vec::with_capacity(tx.tx.inputs.len());
         for tx_input in &tx.tx.inputs {
             let coin = tx_input
                 .coin
                 .as_ref()
                 .ok_or(MissingTxInputCoin(tx_input.prev_out))?;
             spent_scripts.push(coin.output.script.clone());
         }
         Ok(spent_scripts)
     }
 
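     /// Look up the spent tokens of all inputs of `tx`, either from this
     /// batch's valid txs or from the DB. Coinbase txs spend no tokens.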
     fn tx_token_inputs(
         &self,
         tx: &IndexTx<'_>,
         db_data: &BatchDbData,
         valid_txs: &HashMap<TxNum, TokenTx>,
     ) -> Result<Vec<Option<SpentToken>>> {
         if tx.is_coinbase {
             Ok(vec![])
         } else {
             let mut inputs = Vec::with_capacity(tx.input_nums.len());
             for (&input_num, input) in tx.input_nums.iter().zip(&tx.tx.inputs) {
                 inputs.push(self.token_output(
                     input_num,
                     input.prev_out.out_idx as usize,
                     db_data,
                     valid_txs,
                 )?);
             }
             Ok(inputs)
         }
     }
 
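     /// Look up the token data of output `out_idx` of tx `tx_num`, first in
     /// this batch's valid txs, then in the DB.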
     fn token_output(
         &self,
         tx_num: TxNum,
         out_idx: usize,
         db_data: &BatchDbData,
         valid_txs: &HashMap<TxNum, TokenTx>,
     ) -> Result<Option<SpentToken>> {
         // Output is from this batch
         if let Some(token_tx) = valid_txs.get(&tx_num) {
             let Some(Some(token_output)) = token_tx.outputs.get(out_idx) else {
                 return Ok(None);
             };
             return Ok(Some(token_tx.spent_token(token_output)));
         }
 
         // Output is from the DB
         let Some(db_token_tx) = db_data.token_txs.get(&tx_num) else {
             return Ok(None);
         };
         Ok(db_token_tx.spent_token(
             &db_token_tx.outputs[out_idx],
             |tx_num| {
                 db_data
                     .token_metas
                     .get_by_left(&tx_num)
                     .cloned()
                     .ok_or(MissingTokenTxNum(tx_num))
             },
         )?)
     }
 }
 
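 /// Index of `needle_meta` in `metas`; the caller must ensure it's present.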
 fn meta_idx(needle_meta: &TokenMeta, metas: &[TokenMeta]) -> u32 {
     metas
         .iter()
         .position(|meta| meta == needle_meta)
         .expect("TokenMeta should be in the list") as u32
 }
 
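 /// Convert a verified token to its DB representation, referencing the token
 /// by its index in `metas`.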
 fn to_db_token(token: Option<&Token>, metas: &[TokenMeta]) -> DbToken {
     let Some(token) = token else {
         return DbToken::NoToken;
     };
     match token.variant {
         TokenVariant::Amount(amount) => {
             DbToken::Amount(meta_idx(&token.meta, metas), amount)
         }
         TokenVariant::MintBaton => {
             DbToken::MintBaton(meta_idx(&token.meta, metas))
         }
         TokenVariant::Unknown(token_type) => match token.meta.token_type {
             TokenType::Slp(_) => DbToken::UnknownSlp(token_type),
             TokenType::Alp(_) => DbToken::UnknownAlp(token_type),
         },
     }
 }
diff --git a/contrib/gitian-descriptors/gitian-linux.yml b/contrib/gitian-descriptors/gitian-linux.yml
index 133fb4759..1ba2fb332 100644
--- a/contrib/gitian-descriptors/gitian-linux.yml
+++ b/contrib/gitian-descriptors/gitian-linux.yml
@@ -1,245 +1,245 @@
 ---
 name: "bitcoin-abc-linux"
 enable_cache: true
 distro: "debian"
 suites:
 - "bullseye"
 architectures:
 - "amd64"
 packages:
 - "autoconf"
 - "automake"
 - "binutils-aarch64-linux-gnu"
 - "binutils-arm-linux-gnueabihf"
 - "binutils-gold"
 - "bison"
 - "bsdmainutils"
 - "ca-certificates"
 - "clang"
 - "cmake"
 - "curl"
 - "faketime"
 # Use gcc/g++ 9 to avoid introducing the new pthread_cond_clockwait from glibc
 # 2.30, which would make our release binary incompatible with systems using an
 # older glibc version.
 - "g++-9"
 - "g++-9-aarch64-linux-gnu"
 - "g++-9-arm-linux-gnueabihf"
 - "gcc-9"
 - "gcc-9-aarch64-linux-gnu"
 - "gcc-9-arm-linux-gnueabihf"
 - "git"
 - "gperf"
 # Needed for Rocksdb
 - "libc6-dev-i386"
 - "libtool"
 - "ninja-build"
 - "pkg-config"
 - "protobuf-compiler"
 - "python3"
 - "python3-pip"
 remotes:
 - "url": "https://github.com/Bitcoin-ABC/bitcoin-abc.git"
   "dir": "bitcoin"
 files: []
 script: |
 
   WRAP_DIR=$HOME/wrapped
   HOSTS=(
    x86_64-linux-gnu
    arm-linux-gnueabihf
    aarch64-linux-gnu
   )
 
   # CMake toolchain file names differ from the host names
   declare -A CMAKE_TOOLCHAIN_FILE
   CMAKE_TOOLCHAIN_FILE[x86_64-linux-gnu]=Linux64.cmake
   CMAKE_TOOLCHAIN_FILE[arm-linux-gnueabihf]=LinuxARM.cmake
   CMAKE_TOOLCHAIN_FILE[aarch64-linux-gnu]=LinuxAArch64.cmake
 
   # Allow extra cmake options to be specified for each host
   declare -A CMAKE_EXTRA_OPTIONS
   # ARM assembly is supported but experimental, disable it for the release
   CMAKE_EXTRA_OPTIONS[arm-linux-gnueabihf]="-DSECP256K1_USE_ASM=OFF"
 
   FAKETIME_HOST_PROGS=""
   FAKETIME_PROGS="date ar ranlib nm"
   HOST_CFLAGS="-O2 -g"
   HOST_CXXFLAGS="-O2 -g"
   HOST_LDFLAGS=-static-libstdc++
 
   export TZ="UTC"
   export BUILD_DIR=`pwd`
   mkdir -p ${WRAP_DIR}
   if test -n "$GBUILD_CACHE_ENABLED"; then
     export SOURCES_PATH=${GBUILD_COMMON_CACHE}
     export BASE_CACHE=${GBUILD_PACKAGE_CACHE}
     mkdir -p ${BASE_CACHE} ${SOURCES_PATH}
   fi
 
   function create_global_faketime_wrappers {
   for prog in ${FAKETIME_PROGS}; do
     echo '#!/usr/bin/env bash' > ${WRAP_DIR}/${prog}
     echo "REAL=\`which -a ${prog} | grep -v ${WRAP_DIR}/${prog} | head -1\`" >> ${WRAP_DIR}/${prog}
     echo 'export LD_PRELOAD=/usr/lib/x86_64-linux-gnu/faketime/libfaketime.so.1' >> ${WRAP_DIR}/${prog}
     echo "export FAKETIME=\"$1\"" >> ${WRAP_DIR}/${prog}
     echo "\$REAL \$@" >> $WRAP_DIR/${prog}
     chmod +x ${WRAP_DIR}/${prog}
   done
   }
 
   function create_per-host_faketime_wrappers {
   for i in ${HOSTS[@]}; do
     for prog in ${FAKETIME_HOST_PROGS}; do
         echo '#!/usr/bin/env bash' > ${WRAP_DIR}/${i}-${prog}
         echo "REAL=\`which -a ${i}-${prog} | grep -v ${WRAP_DIR}/${i}-${prog} | head -1\`" >> ${WRAP_DIR}/${i}-${prog}
         echo 'export LD_PRELOAD=/usr/lib/x86_64-linux-gnu/faketime/libfaketime.so.1' >> ${WRAP_DIR}/${i}-${prog}
         echo "export FAKETIME=\"$1\"" >> ${WRAP_DIR}/${i}-${prog}
         echo "\$REAL \"\$@\"" >> $WRAP_DIR/${i}-${prog}
         chmod +x ${WRAP_DIR}/${i}-${prog}
     done
   done
   }
 
   function create_per-host_compiler_wrapper {
   for i in ${HOSTS[@]}; do
     for prog in gcc g++; do
         echo '#!/usr/bin/env bash' > ${WRAP_DIR}/${i}-${prog}
         echo "REAL=\`which -a ${i}-${prog}-9 | head -1\`" >> ${WRAP_DIR}/${i}-${prog}
         echo "\$REAL \"\$@\"" >> $WRAP_DIR/${i}-${prog}
         chmod +x ${WRAP_DIR}/${i}-${prog}
     done
   done
   }
 
   function create_native_compiler_wrapper {
   for prog in gcc g++; do
       echo '#!/usr/bin/env bash' > ${WRAP_DIR}/${prog}
       echo "REAL=\`which -a x86_64-linux-gnu-${prog}-9 | head -1\`" >> ${WRAP_DIR}/${prog}
       echo "\$REAL \"\$@\"" >> $WRAP_DIR/${prog}
       chmod +x ${WRAP_DIR}/${prog}
   done
   }
 
   pip3 install lief==0.13.2
 
   # Faketime for depends so intermediate results are comparable
   export PATH_orig=${PATH}
   create_global_faketime_wrappers "2000-01-01 12:00:00"
   create_per-host_faketime_wrappers "2000-01-01 12:00:00"
 
   # Wrap the compilers <host>-gcc-9 and <host>-g++-9 into <host>-gcc and
   # <host>-g++
   create_per-host_compiler_wrapper
   # For the current host platform, also wrap into regular gcc and g++,
   # assuming x86_64
   create_native_compiler_wrapper
 
   export PATH=${WRAP_DIR}:${PATH}
 
   cd bitcoin
   SOURCEDIR=`pwd`
   BASEPREFIX=`pwd`/depends
   # Build dependencies for each host
   for i in ${HOSTS[@]}; do
     make ${MAKEOPTS} -C ${BASEPREFIX} HOST="${i}"
   done
 
   # Faketime for binaries
   export PATH=${PATH_orig}
   create_global_faketime_wrappers "${REFERENCE_DATETIME}"
   create_per-host_faketime_wrappers "${REFERENCE_DATETIME}"
   export PATH=${WRAP_DIR}:${PATH}
 
   mkdir -p source_package
   pushd source_package
   # Any toolchain file will work for building the source package; just pick
   # the first one
   cmake -GNinja .. \
     -DCMAKE_TOOLCHAIN_FILE=${SOURCEDIR}/cmake/platforms/${CMAKE_TOOLCHAIN_FILE[${HOSTS[0]}]}
 
   ninja package_source
   SOURCEDIST=`echo bitcoin-abc-*.tar.gz`
   mv ${SOURCEDIST} ..
   popd
   DISTNAME=`echo ${SOURCEDIST} | sed 's/.tar.*//'`
   # Correct tar file order
   mkdir -p temp
   pushd temp
   tar -xf ../$SOURCEDIST
   find bitcoin-abc-* | sort | tar --mtime="${REFERENCE_DATETIME}" --no-recursion --mode='u+rw,go+r-w,a+X' --owner=0 --group=0 -c -T - | gzip -9n > ../$SOURCEDIST
   popd
 
   ORIGPATH="$PATH"
 
   # Install chronik dependencies
   # Rust
   curl -sSf https://static.rust-lang.org/rustup/archive/1.26.0/x86_64-unknown-linux-gnu/rustup-init -o rustup-init
   echo "0b2f6c8f85a3d02fde2efc0ced4657869d73fccfce59defb4e8d29233116e6db rustup-init" | sha256sum -c
   chmod +x rustup-init
-  ./rustup-init -y --default-toolchain=1.72.0
+  ./rustup-init -y --default-toolchain=1.76.0
 
   # Rust target names differ from our host names, so map them
   declare -A RUST_TARGET
   RUST_TARGET[x86_64-linux-gnu]=x86_64-unknown-linux-gnu
   RUST_TARGET[arm-linux-gnueabihf]=arm-unknown-linux-gnueabihf
   RUST_TARGET[aarch64-linux-gnu]=aarch64-unknown-linux-gnu
   for i in ${HOSTS[@]}; do
     $HOME/.cargo/bin/rustup target add ${RUST_TARGET[${i}]}
   done
 
   # Cleanup
   rm -f rustup-init
 
   # Extend the hosts to include an experimental chronik build.
   # Despite not being a new host per se, this makes it easy to reuse the same
   # code and prevents errors.
   # TODO Remove after chronik is made part of the main release.
   for i in ${HOSTS[@]}; do
     HOSTS+=($i-chronik-experimental)
     CMAKE_TOOLCHAIN_FILE[$i-chronik-experimental]=${CMAKE_TOOLCHAIN_FILE[$i]}
     CMAKE_EXTRA_OPTIONS[$i-chronik-experimental]="${CMAKE_EXTRA_OPTIONS[$i]} -DBUILD_BITCOIN_CHRONIK=ON"
   done
 
   # Extract the release tarball into a dir for each host and build
   for i in ${HOSTS[@]}; do
     export PATH=${BASEPREFIX}/${i}/native/bin:${ORIGPATH}
     mkdir -p distsrc-${i}
     cd distsrc-${i}
     INSTALLPATH=`pwd`/installed/${DISTNAME}
     mkdir -p ${INSTALLPATH}
 
     cmake -GNinja .. \
       -DCMAKE_TOOLCHAIN_FILE=${SOURCEDIR}/cmake/platforms/${CMAKE_TOOLCHAIN_FILE[${i}]} \
       -DCLIENT_VERSION_IS_RELEASE=ON \
       -DENABLE_CLANG_TIDY=OFF \
       -DENABLE_REDUCE_EXPORTS=ON \
       -DENABLE_STATIC_LIBSTDCXX=ON \
       -DENABLE_GLIBC_BACK_COMPAT=ON \
       -DCMAKE_INSTALL_PREFIX=${INSTALLPATH} \
       -DCCACHE=OFF \
       -DUSE_LINKER= \
       ${CMAKE_EXTRA_OPTIONS[${i}]}
 
     ninja
     ninja security-check
 
     # TODO Rust pulls several symbols from GLIBC 2.30; this needs to be
     # fixed. Since it is still in an experimental state, ignore it for now.
     if [[ "${i}" != *"chronik-experimental" ]]; then
       ninja symbol-check
     else
       # Install the chronik protobuf files
       ninja install-chronik-proto
     fi
 
     ninja install-debug
 
     cd installed
     find ${DISTNAME} -not -name "*.dbg" | sort | tar --mtime="${REFERENCE_DATETIME}" --no-recursion --mode='u+rw,go+r-w,a+X' --owner=0 --group=0 -c -T - | gzip -9n > ${OUTDIR}/${DISTNAME}-${i}.tar.gz
     find ${DISTNAME} -name "*.dbg" | sort | tar --mtime="${REFERENCE_DATETIME}" --no-recursion --mode='u+rw,go+r-w,a+X' --owner=0 --group=0 -c -T - | gzip -9n > ${OUTDIR}/${DISTNAME}-${i}-debug.tar.gz
     cd ../../
     rm -rf distsrc-${i}
   done
   mkdir -p $OUTDIR/src
   mv $SOURCEDIST $OUTDIR/src
diff --git a/contrib/utils/install-dependencies-bullseye.sh b/contrib/utils/install-dependencies-bullseye.sh
index 2be4f5d6a..e6c2ed07b 100755
--- a/contrib/utils/install-dependencies-bullseye.sh
+++ b/contrib/utils/install-dependencies-bullseye.sh
@@ -1,212 +1,212 @@
 #!/usr/bin/env bash
 
 export LC_ALL=C.UTF-8
 
 set -euxo pipefail
 
 dpkg --add-architecture i386
 
 PACKAGES=(
   arcanist
   automake
   autotools-dev
   binutils
   bison
   bsdmainutils
   build-essential
   ccache
   cmake
   curl
   default-jdk
   devscripts
   doxygen
   dput
   g++-9
   g++-9-aarch64-linux-gnu
   g++-9-arm-linux-gnueabihf
   g++-9-multilib
   g++-mingw-w64
   gcc-9
   gcc-9-aarch64-linux-gnu
   gcc-9-arm-linux-gnueabihf
   gcc-9-multilib
   gettext-base
   git
   golang
   gnupg
   graphviz
   gperf
   help2man
   jq
   lcov
   less
   lib32stdc++-10-dev
   libboost-dev
   libbz2-dev
   libc6-dev:i386
   libcap-dev
   libdb++-dev
   libdb-dev
   libevent-dev
   libjemalloc-dev
   libminiupnpc-dev
   libnatpmp-dev
   libprotobuf-dev
   libpcsclite-dev
   libqrencode-dev
   libqt5core5a
   libqt5dbus5
   libqt5gui5
   libsqlite3-dev
   libssl-dev
   libtinfo5
   libtool
   libzmq3-dev
   lld
   make
   ninja-build
   nsis
   pandoc
   php-codesniffer
   pkg-config
   protobuf-compiler
   python3
   python3-pip
   python3-setuptools
   python3-yaml
   python3-zmq
   qttools5-dev
   qttools5-dev-tools
   shellcheck
   software-properties-common
   swig
   tar
   wget
   xorriso
   xvfb
   yamllint
 )
 
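 # Join the remaining arguments using the first argument as separator.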
 function join_by() {
   local IFS="$1"
   shift
   echo "$*"
 }
 
 apt-get update
 DEBIAN_FRONTEND=noninteractive apt-get install -y $(join_by ' ' "${PACKAGES[@]}")
 
 BACKPORTS=(
   git-filter-repo
   qemu-user-static
 )
 
 echo "deb http://deb.debian.org/debian bullseye-backports main" | tee -a /etc/apt/sources.list
 apt-get update
 DEBIAN_FRONTEND=noninteractive apt-get -t bullseye-backports install -y $(join_by ' ' "${BACKPORTS[@]}")
 
 # Install llvm and clang
 apt-key add "$(dirname "$0")"/llvm.pub
 add-apt-repository "deb https://apt.llvm.org/bullseye/   llvm-toolchain-bullseye-12 main"
 apt-get update
 
 LLVM_PACKAGES=(
   clang-12
   clang-format-12
   clang-tidy-12
   clang-tools-12
 )
 DEBIAN_FRONTEND=noninteractive apt-get install -y $(join_by ' ' "${LLVM_PACKAGES[@]}")
 
 # Make sure our specific llvm and clang versions have highest priority
 update-alternatives --install /usr/bin/clang clang "$(command -v clang-12)" 100
 update-alternatives --install /usr/bin/clang++ clang++ "$(command -v clang++-12)" 100
 update-alternatives --install /usr/bin/llvm-symbolizer llvm-symbolizer "$(command -v llvm-symbolizer-12)" 100
 
 # Use gcc-9/g++-9 by default so it uses libstdc++-9. This prevents pulling in
 # the new pthread_cond_clockwait symbol from GLIBC_2.30 and ensures we are
 # testing under the same conditions our release is built.
 update-alternatives --install /usr/bin/gcc gcc "$(command -v gcc-9)" 100
 update-alternatives --install /usr/bin/g++ g++ "$(command -v g++-9)" 100
 
 update-alternatives --install /usr/bin/aarch64-linux-gnu-gcc aarch64-linux-gnu-gcc "$(command -v aarch64-linux-gnu-gcc-9)" 100
 update-alternatives --install /usr/bin/aarch64-linux-gnu-g++ aarch64-linux-gnu-g++ "$(command -v aarch64-linux-gnu-g++-9)" 100
 
 update-alternatives --install /usr/bin/arm-linux-gnueabihf-gcc arm-linux-gnueabihf-gcc "$(command -v arm-linux-gnueabihf-gcc-9)" 100
 update-alternatives --install /usr/bin/arm-linux-gnueabihf-g++ arm-linux-gnueabihf-g++ "$(command -v arm-linux-gnueabihf-g++-9)" 100
 
 # Use the mingw posix variant
 update-alternatives --set x86_64-w64-mingw32-g++ $(command -v x86_64-w64-mingw32-g++-posix)
 update-alternatives --set x86_64-w64-mingw32-gcc $(command -v x86_64-w64-mingw32-gcc-posix)
 
 # Python library for merging nested structures
 pip3 install deepmerge
 # For running Python test suites
 pip3 install pytest
 # For encoding/decoding protobuf messages
 # This version is compatible with Debian's "protobuf-compiler" package
 pip3 install "protobuf<=3.20"
 # For security-check.py and symbol-check.py
 pip3 install "lief==0.13.2"
 # For Chronik WebSocket endpoint
 pip3 install websocket-client
 
 # Required python linters
 pip3 install black==23.3.0 isort==5.6.4 mypy==0.910 flynt==0.78 flake8==6.0.0
 echo "export PATH=\"$(python3 -m site --user-base)/bin:\$PATH\"" >> ~/.bashrc
 # shellcheck source=/dev/null
 source ~/.bashrc
 
 # Install npm v10.x and nodejs v20.x
 wget https://deb.nodesource.com/setup_20.x -O nodesetup.sh
 echo "f8fb478685fb916cc70858200595a4f087304bcde1e69aa713bf2eb41695afc1 nodesetup.sh" | sha256sum -c
 chmod +x nodesetup.sh
 ./nodesetup.sh
 apt-get install -y nodejs
 
 # Install nyc for mocha unit test reporting
 npm i -g nyc
 
-# Install Rust stable 1.72.0 and nightly from the 2023-12-29
+# Install Rust stable 1.76.0 and nightly from 2023-12-29
 curl -sSf https://static.rust-lang.org/rustup/archive/1.26.0/x86_64-unknown-linux-gnu/rustup-init -o rustup-init
 echo "0b2f6c8f85a3d02fde2efc0ced4657869d73fccfce59defb4e8d29233116e6db rustup-init" | sha256sum -c
 chmod +x rustup-init
-./rustup-init -y --default-toolchain=1.72.0
+./rustup-init -y --default-toolchain=1.76.0
 RUST_HOME="${HOME}/.cargo/bin"
 RUST_NIGHTLY_DATE=2023-12-29
 "${RUST_HOME}/rustup" install nightly-${RUST_NIGHTLY_DATE}
 "${RUST_HOME}/rustup" component add rustfmt --toolchain nightly-${RUST_NIGHTLY_DATE}
 # Name the nightly toolchain "abc-nightly"
 "${RUST_HOME}/rustup" toolchain link abc-nightly "$(${RUST_HOME}/rustc +nightly-${RUST_NIGHTLY_DATE} --print sysroot)"
 
 # Install required compile platform targets on stable
 "${RUST_HOME}/rustup" target add "i686-unknown-linux-gnu" \
                                  "x86_64-unknown-linux-gnu" \
                                  "aarch64-unknown-linux-gnu" \
                                  "arm-unknown-linux-gnueabihf" \
                                  "x86_64-apple-darwin" \
                                  "x86_64-pc-windows-gnu"
 
 # Install Electrum ABC test dependencies
 here=$(dirname -- "$(readlink -f -- "${BASH_SOURCE[0]}")")
 pip3 install -r "${here}/../../electrum/contrib/requirements/requirements.txt"
 pip3 install -r "${here}/../../electrum/contrib/requirements/requirements-regtest.txt"
 pip3 install -r "${here}/../../electrum/contrib/requirements/requirements-hw.txt"
 
 # Install the winehq-staging version of wine that doesn't suffer from the memory
 # limitations of the previous versions. Installation instructions are from
 # https://wiki.winehq.org/Debian
 mkdir -p /etc/apt/keyrings
 wget -O /etc/apt/keyrings/winehq-archive.key https://dl.winehq.org/wine-builds/winehq.key
 wget -NP /etc/apt/sources.list.d/ https://dl.winehq.org/wine-builds/debian/dists/bullseye/winehq-bullseye.sources
 apt-get update
 WINE_VERSION=8.19~bullseye-1
 # We need all the packages and dependencies to use a pinpointed version
 WINE_PACKAGES=(
   winehq-staging
   wine-staging
   wine-staging-amd64
   wine-staging-i386
 )
 # Pinpoint the version so we get consistent results on CI
 DEBIAN_FRONTEND=noninteractive apt-get install -y $(join_by ' ' "${WINE_PACKAGES[@]/%/=${WINE_VERSION}}")