diff --git a/Cargo.lock b/Cargo.lock
index 1f629fcd9..9a83526b3 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1,2092 +1,2093 @@
 # This file is automatically @generated by Cargo.
 # It is not intended for manual editing.
 version = 3
 
 [[package]]
 name = "abc-rust-error"
 version = "0.1.0"
 dependencies = [
  "abc-rust-lint",
  "eyre",
  "http",
  "stable-eyre",
  "thiserror",
 ]
 
 [[package]]
 name = "abc-rust-lint"
 version = "0.1.0"
 
 [[package]]
 name = "addr2line"
 version = "0.20.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f4fa78e18c64fce05e902adecd7a5eed15a5e0a3439f7b0e169f0252214865e3"
 dependencies = [
  "gimli",
 ]
 
 [[package]]
 name = "adler"
 version = "1.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
 
 [[package]]
 name = "aho-corasick"
 version = "1.0.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6748e8def348ed4d14996fa801f4122cd763fff530258cdc03f64b25f89d3a5a"
 dependencies = [
  "memchr",
 ]
 
 [[package]]
 name = "anyhow"
 version = "1.0.75"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6"
 
 [[package]]
 name = "async-trait"
 version = "0.1.73"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0"
 dependencies = [
  "proc-macro2",
  "quote",
  "syn 2.0.29",
 ]
 
 [[package]]
 name = "atomic-polyfill"
 version = "0.1.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e3ff7eb3f316534d83a8a2c3d1674ace8a5a71198eba31e2e2b597833f699b28"
 dependencies = [
  "critical-section",
 ]
 
 [[package]]
 name = "autocfg"
 version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
 
 [[package]]
 name = "axum"
 version = "0.6.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf"
 dependencies = [
  "async-trait",
  "axum-core",
  "base64",
  "bitflags 1.3.2",
  "bytes",
  "futures-util",
  "http",
  "http-body",
  "hyper",
  "itoa",
  "matchit",
  "memchr",
  "mime",
  "percent-encoding",
  "pin-project-lite",
  "rustversion",
  "serde",
  "serde_json",
  "serde_path_to_error",
  "serde_urlencoded",
  "sha1",
  "sync_wrapper",
  "tokio",
  "tokio-tungstenite",
  "tower",
  "tower-layer",
  "tower-service",
 ]
 
 [[package]]
 name = "axum-core"
 version = "0.3.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c"
 dependencies = [
  "async-trait",
  "bytes",
  "futures-util",
  "http",
  "http-body",
  "mime",
  "rustversion",
  "tower-layer",
  "tower-service",
 ]
 
 [[package]]
 name = "backtrace"
 version = "0.3.68"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4319208da049c43661739c5fade2ba182f09d1dc2299b32298d3a31692b17e12"
 dependencies = [
  "addr2line",
  "cc",
  "cfg-if",
  "libc",
  "miniz_oxide",
  "object",
  "rustc-demangle",
 ]
 
 [[package]]
 name = "base64"
 version = "0.21.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d"
 
 [[package]]
 name = "bimap"
 version = "0.6.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "230c5f1ca6a325a32553f8640d31ac9b49f2411e901e427570154868b46da4f7"
 
 [[package]]
 name = "bindgen"
 version = "0.65.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "cfdf7b466f9a4903edc73f95d6d2bcd5baf8ae620638762244d3f60143643cc5"
 dependencies = [
  "bitflags 1.3.2",
  "cexpr",
  "clang-sys",
  "lazy_static",
  "lazycell",
  "peeking_take_while",
  "prettyplease 0.2.12",
  "proc-macro2",
  "quote",
  "regex",
  "rustc-hash",
  "shlex",
  "syn 2.0.29",
 ]
 
 [[package]]
 name = "bitcoinsuite-core"
 version = "0.1.0"
 dependencies = [
  "abc-rust-lint",
  "bytes",
  "hex",
  "hex-literal",
  "ripemd",
  "serde",
  "sha2",
  "thiserror",
 ]
 
 [[package]]
 name = "bitcoinsuite-slp"
 version = "0.1.0"
 dependencies = [
  "abc-rust-lint",
  "bitcoinsuite-core",
  "bytes",
  "hex",
  "itertools",
  "pretty_assertions",
  "serde",
  "thiserror",
 ]
 
 [[package]]
 name = "bitflags"
 version = "1.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
 
 [[package]]
 name = "bitflags"
 version = "2.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635"
 
 [[package]]
 name = "block-buffer"
 version = "0.10.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71"
 dependencies = [
  "generic-array",
 ]
 
 [[package]]
 name = "byteorder"
 version = "1.4.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
 
 [[package]]
 name = "bytes"
 version = "1.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223"
 dependencies = [
  "serde",
 ]
 
 [[package]]
 name = "bzip2-sys"
 version = "0.1.11+1.0.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "736a955f3fa7875102d57c82b8cac37ec45224a07fd32d58f9f7a186b6cd4cdc"
 dependencies = [
  "cc",
  "libc",
  "pkg-config",
 ]
 
 [[package]]
 name = "cc"
 version = "1.0.83"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0"
 dependencies = [
  "jobserver",
  "libc",
 ]
 
 [[package]]
 name = "cexpr"
 version = "0.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766"
 dependencies = [
  "nom",
 ]
 
 [[package]]
 name = "cfg-if"
 version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 
 [[package]]
 name = "chronik-bridge"
 version = "0.1.0"
 dependencies = [
  "abc-rust-lint",
  "bitcoinsuite-core",
  "cxx",
  "cxx-build",
 ]
 
 [[package]]
 name = "chronik-db"
 version = "0.1.0"
 dependencies = [
  "abc-rust-error",
  "abc-rust-lint",
  "bimap",
  "bitcoinsuite-core",
  "bitcoinsuite-slp",
  "bytes",
  "chronik-util",
  "fastrand",
  "hex",
  "itertools",
  "postcard",
  "pretty_assertions",
  "rocksdb",
  "seahash",
  "serde",
  "tempdir",
  "thiserror",
  "topo_sort",
 ]
 
 [[package]]
 name = "chronik-http"
 version = "0.1.0"
 dependencies = [
  "abc-rust-error",
  "abc-rust-lint",
  "async-trait",
  "axum",
  "bitcoinsuite-core",
  "chronik-db",
  "chronik-indexer",
  "chronik-proto",
  "chronik-util",
  "futures",
  "hex",
  "hyper",
  "prost",
  "thiserror",
  "tokio",
 ]
 
 [[package]]
 name = "chronik-indexer"
 version = "0.1.0"
 dependencies = [
  "abc-rust-error",
  "abc-rust-lint",
  "bitcoinsuite-core",
  "bitcoinsuite-slp",
+ "bytes",
  "chronik-bridge",
  "chronik-db",
  "chronik-proto",
  "chronik-util",
  "cxx",
  "pretty_assertions",
  "prost",
  "prost-build",
  "tempdir",
  "thiserror",
  "tokio",
 ]
 
 [[package]]
 name = "chronik-lib"
 version = "0.1.0"
 dependencies = [
  "abc-rust-error",
  "abc-rust-lint",
  "bitcoinsuite-core",
  "chronik-bridge",
  "chronik-db",
  "chronik-http",
  "chronik-indexer",
  "chronik-plugin",
  "chronik-util",
  "cxx",
  "cxx-build",
  "thiserror",
  "tokio",
 ]
 
 [[package]]
 name = "chronik-plugin"
 version = "0.1.0"
 dependencies = [
  "abc-rust-error",
  "abc-rust-lint",
  "bitcoinsuite-core",
  "bitcoinsuite-slp",
  "chronik-util",
  "pyo3",
 ]
 
 [[package]]
 name = "chronik-proto"
 version = "0.1.0"
 dependencies = [
  "abc-rust-lint",
  "prost",
  "prost-build",
 ]
 
 [[package]]
 name = "chronik-util"
 version = "0.1.0"
 dependencies = [
  "abc-rust-lint",
 ]
 
 [[package]]
 name = "clang-sys"
 version = "1.6.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c688fc74432808e3eb684cae8830a86be1d66a2bd58e1f248ed0960a590baf6f"
 dependencies = [
  "glob",
  "libc",
  "libloading",
 ]
 
 [[package]]
 name = "cobs"
 version = "0.2.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "67ba02a97a2bd10f4b59b25c7973101c79642302776489e030cd13cdab09ed15"
 
 [[package]]
 name = "codespan-reporting"
 version = "0.11.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3538270d33cc669650c4b093848450d380def10c331d38c768e34cac80576e6e"
 dependencies = [
  "termcolor",
  "unicode-width",
 ]
 
 [[package]]
 name = "cpufeatures"
 version = "0.2.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1"
 dependencies = [
  "libc",
 ]
 
 [[package]]
 name = "critical-section"
 version = "1.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7059fff8937831a9ae6f0fe4d658ffabf58f2ca96aa9dec1c889f936f705f216"
 
 [[package]]
 name = "crypto-common"
 version = "0.1.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3"
 dependencies = [
  "generic-array",
  "typenum",
 ]
 
 [[package]]
 name = "cxx"
 version = "1.0.106"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "28403c86fc49e3401fdf45499ba37fad6493d9329449d6449d7f0e10f4654d28"
 dependencies = [
  "cc",
  "cxxbridge-flags",
  "cxxbridge-macro",
  "link-cplusplus",
 ]
 
 [[package]]
 name = "cxx-build"
 version = "1.0.106"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "78da94fef01786dc3e0c76eafcd187abcaa9972c78e05ff4041e24fdf059c285"
 dependencies = [
  "cc",
  "codespan-reporting",
  "once_cell",
  "proc-macro2",
  "quote",
  "scratch",
  "syn 2.0.29",
 ]
 
 [[package]]
 name = "cxxbridge-flags"
 version = "1.0.106"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e2a6f5e1dfb4b34292ad4ea1facbfdaa1824705b231610087b00b17008641809"
 
 [[package]]
 name = "cxxbridge-macro"
 version = "1.0.106"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "50c49547d73ba8dcfd4ad7325d64c6d5391ff4224d498fc39a6f3f49825a530d"
 dependencies = [
  "proc-macro2",
  "quote",
  "syn 2.0.29",
 ]
 
 [[package]]
 name = "data-encoding"
 version = "2.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308"
 
 [[package]]
 name = "diff"
 version = "0.1.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
 
 [[package]]
 name = "digest"
 version = "0.10.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292"
 dependencies = [
  "block-buffer",
  "crypto-common",
 ]
 
 [[package]]
 name = "either"
 version = "1.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"
 
 [[package]]
 name = "equivalent"
 version = "1.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5"
 
 [[package]]
 name = "errno"
 version = "0.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6b30f669a7961ef1631673d2766cc92f52d64f7ef354d4fe0ddfd30ed52f0f4f"
 dependencies = [
  "errno-dragonfly",
  "libc",
  "windows-sys",
 ]
 
 [[package]]
 name = "errno-dragonfly"
 version = "0.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf"
 dependencies = [
  "cc",
  "libc",
 ]
 
 [[package]]
 name = "eyre"
 version = "0.6.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4c2b6b5a29c02cdc822728b7d7b8ae1bab3e3b05d44522770ddd49722eeac7eb"
 dependencies = [
  "indenter",
  "once_cell",
 ]
 
 [[package]]
 name = "fastrand"
 version = "2.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764"
 
 [[package]]
 name = "fixedbitset"
 version = "0.4.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80"
 
 [[package]]
 name = "fnv"
 version = "1.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
 
 [[package]]
 name = "form_urlencoded"
 version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652"
 dependencies = [
  "percent-encoding",
 ]
 
 [[package]]
 name = "fuchsia-cprng"
 version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba"
 
 [[package]]
 name = "futures"
 version = "0.3.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40"
 dependencies = [
  "futures-channel",
  "futures-core",
  "futures-executor",
  "futures-io",
  "futures-sink",
  "futures-task",
  "futures-util",
 ]
 
 [[package]]
 name = "futures-channel"
 version = "0.3.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2"
 dependencies = [
  "futures-core",
  "futures-sink",
 ]
 
 [[package]]
 name = "futures-core"
 version = "0.3.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c"
 
 [[package]]
 name = "futures-executor"
 version = "0.3.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0"
 dependencies = [
  "futures-core",
  "futures-task",
  "futures-util",
 ]
 
 [[package]]
 name = "futures-io"
 version = "0.3.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964"
 
 [[package]]
 name = "futures-macro"
 version = "0.3.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72"
 dependencies = [
  "proc-macro2",
  "quote",
  "syn 2.0.29",
 ]
 
 [[package]]
 name = "futures-sink"
 version = "0.3.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f43be4fe21a13b9781a69afa4985b0f6ee0e1afab2c6f454a8cf30e2b2237b6e"
 
 [[package]]
 name = "futures-task"
 version = "0.3.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65"
 
 [[package]]
 name = "futures-util"
 version = "0.3.28"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533"
 dependencies = [
  "futures-channel",
  "futures-core",
  "futures-io",
  "futures-macro",
  "futures-sink",
  "futures-task",
  "memchr",
  "pin-project-lite",
  "pin-utils",
  "slab",
 ]
 
 [[package]]
 name = "generic-array"
 version = "0.14.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a"
 dependencies = [
  "typenum",
  "version_check",
 ]
 
 [[package]]
 name = "getrandom"
 version = "0.2.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427"
 dependencies = [
  "cfg-if",
  "libc",
  "wasi",
 ]
 
 [[package]]
 name = "gimli"
 version = "0.27.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b6c80984affa11d98d1b88b66ac8853f143217b399d3c74116778ff8fdb4ed2e"
 
 [[package]]
 name = "glob"
 version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b"
 
 [[package]]
 name = "hash32"
 version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b0c35f58762feb77d74ebe43bdbc3210f09be9fe6742234d573bacc26ed92b67"
 dependencies = [
  "byteorder",
 ]
 
 [[package]]
 name = "hashbrown"
 version = "0.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a"
 
 [[package]]
 name = "heapless"
 version = "0.7.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "db04bc24a18b9ea980628ecf00e6c0264f3c1426dac36c00cb49b6fbad8b0743"
 dependencies = [
  "atomic-polyfill",
  "hash32",
  "rustc_version",
  "serde",
  "spin",
  "stable_deref_trait",
 ]
 
 [[package]]
 name = "heck"
 version = "0.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8"
 
 [[package]]
 name = "hermit-abi"
 version = "0.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b"
 
 [[package]]
 name = "hex"
 version = "0.4.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
 
 [[package]]
 name = "hex-literal"
 version = "0.3.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7ebdb29d2ea9ed0083cd8cece49bbd968021bd99b0849edb4a9a7ee0fdf6a4e0"
 
 [[package]]
 name = "http"
 version = "0.2.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482"
 dependencies = [
  "bytes",
  "fnv",
  "itoa",
 ]
 
 [[package]]
 name = "http-body"
 version = "0.4.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1"
 dependencies = [
  "bytes",
  "http",
  "pin-project-lite",
 ]
 
 [[package]]
 name = "httparse"
 version = "1.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904"
 
 [[package]]
 name = "httpdate"
 version = "1.0.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
 
 [[package]]
 name = "hyper"
 version = "0.14.27"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468"
 dependencies = [
  "bytes",
  "futures-channel",
  "futures-core",
  "futures-util",
  "http",
  "http-body",
  "httparse",
  "httpdate",
  "itoa",
  "pin-project-lite",
  "socket2 0.4.9",
  "tokio",
  "tower-service",
  "tracing",
  "want",
 ]
 
 [[package]]
 name = "idna"
 version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c"
 dependencies = [
  "unicode-bidi",
  "unicode-normalization",
 ]
 
 [[package]]
 name = "indenter"
 version = "0.3.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683"
 
 [[package]]
 name = "indexmap"
 version = "2.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d"
 dependencies = [
  "equivalent",
  "hashbrown",
 ]
 
 [[package]]
 name = "indoc"
 version = "2.0.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8"
 
 [[package]]
 name = "itertools"
 version = "0.10.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
 dependencies = [
  "either",
 ]
 
 [[package]]
 name = "itoa"
 version = "1.0.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38"
 
 [[package]]
 name = "jobserver"
 version = "0.1.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "936cfd212a0155903bcbc060e316fb6cc7cbf2e1907329391ebadc1fe0ce77c2"
 dependencies = [
  "libc",
 ]
 
 [[package]]
 name = "lazy_static"
 version = "1.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
 
 [[package]]
 name = "lazycell"
 version = "1.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
 
 [[package]]
 name = "libc"
 version = "0.2.147"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3"
 
 [[package]]
 name = "libloading"
 version = "0.7.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f"
 dependencies = [
  "cfg-if",
  "winapi",
 ]
 
 [[package]]
 name = "librocksdb-sys"
 version = "0.11.0+8.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d3386f101bcb4bd252d8e9d2fb41ec3b0862a15a62b478c355b2982efa469e3e"
 dependencies = [
  "bindgen",
  "bzip2-sys",
  "cc",
  "glob",
  "libc",
  "libz-sys",
 ]
 
 [[package]]
 name = "libz-sys"
 version = "1.1.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d97137b25e321a73eef1418d1d5d2eda4d77e12813f8e6dead84bc52c5870a7b"
 dependencies = [
  "cc",
  "pkg-config",
  "vcpkg",
 ]
 
 [[package]]
 name = "link-cplusplus"
 version = "1.0.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9d240c6f7e1ba3a28b0249f774e6a9dd0175054b52dfbb61b16eb8505c3785c9"
 dependencies = [
  "cc",
 ]
 
 [[package]]
 name = "linux-raw-sys"
 version = "0.4.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503"
 
 [[package]]
 name = "lock_api"
 version = "0.4.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16"
 dependencies = [
  "autocfg",
  "scopeguard",
 ]
 
 [[package]]
 name = "log"
 version = "0.4.20"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
 
 [[package]]
 name = "matchit"
 version = "0.7.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ed1202b2a6f884ae56f04cff409ab315c5ce26b5e58d7412e484f01fd52f52ef"
 
 [[package]]
 name = "memchr"
 version = "2.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
 
 [[package]]
 name = "memoffset"
 version = "0.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c"
 dependencies = [
  "autocfg",
 ]
 
 [[package]]
 name = "mime"
 version = "0.3.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
 
 [[package]]
 name = "minimal-lexical"
 version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
 
 [[package]]
 name = "miniz_oxide"
 version = "0.7.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7"
 dependencies = [
  "adler",
 ]
 
 [[package]]
 name = "mio"
 version = "0.8.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2"
 dependencies = [
  "libc",
  "wasi",
  "windows-sys",
 ]
 
 [[package]]
 name = "multimap"
 version = "0.8.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a"
 
 [[package]]
 name = "nom"
 version = "7.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
 dependencies = [
  "memchr",
  "minimal-lexical",
 ]
 
 [[package]]
 name = "num_cpus"
 version = "1.16.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43"
 dependencies = [
  "hermit-abi",
  "libc",
 ]
 
 [[package]]
 name = "object"
 version = "0.31.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8bda667d9f2b5051b8833f59f3bf748b28ef54f850f4fcb389a252aa383866d1"
 dependencies = [
  "memchr",
 ]
 
 [[package]]
 name = "once_cell"
 version = "1.18.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d"
 
 [[package]]
 name = "parking_lot"
 version = "0.12.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
 dependencies = [
  "lock_api",
  "parking_lot_core",
 ]
 
 [[package]]
 name = "parking_lot_core"
 version = "0.9.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e"
 dependencies = [
  "cfg-if",
  "libc",
  "redox_syscall 0.4.1",
  "smallvec",
  "windows-targets",
 ]
 
 [[package]]
 name = "peeking_take_while"
 version = "0.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "19b17cddbe7ec3f8bc800887bab5e717348c95ea2ca0b1bf0837fb964dc67099"
 
 [[package]]
 name = "percent-encoding"
 version = "2.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94"
 
 [[package]]
 name = "petgraph"
 version = "0.6.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9"
 dependencies = [
  "fixedbitset",
  "indexmap",
 ]
 
 [[package]]
 name = "pin-project"
 version = "1.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422"
 dependencies = [
  "pin-project-internal",
 ]
 
 [[package]]
 name = "pin-project-internal"
 version = "1.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405"
 dependencies = [
  "proc-macro2",
  "quote",
  "syn 2.0.29",
 ]
 
 [[package]]
 name = "pin-project-lite"
 version = "0.2.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "12cc1b0bf1727a77a54b6654e7b5f1af8604923edc8b81885f8ec92f9e3f0a05"
 
 [[package]]
 name = "pin-utils"
 version = "0.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184"
 
 [[package]]
 name = "pkg-config"
 version = "0.3.27"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964"
 
 [[package]]
 name = "postcard"
 version = "1.0.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c9ee729232311d3cd113749948b689627618133b1c5012b77342c1950b25eaeb"
 dependencies = [
  "cobs",
  "heapless",
  "serde",
 ]
 
 [[package]]
 name = "ppv-lite86"
 version = "0.2.17"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de"
 
 [[package]]
 name = "pretty_assertions"
 version = "1.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66"
 dependencies = [
  "diff",
  "yansi",
 ]
 
 [[package]]
 name = "prettyplease"
 version = "0.1.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6c8646e95016a7a6c4adea95bafa8a16baab64b583356217f2c85db4a39d9a86"
 dependencies = [
  "proc-macro2",
  "syn 1.0.109",
 ]
 
 [[package]]
 name = "prettyplease"
 version = "0.2.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6c64d9ba0963cdcea2e1b2230fbae2bab30eb25a174be395c41e764bfb65dd62"
 dependencies = [
  "proc-macro2",
  "syn 2.0.29",
 ]
 
 [[package]]
 name = "proc-macro2"
 version = "1.0.66"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9"
 dependencies = [
  "unicode-ident",
 ]
 
 [[package]]
 name = "prost"
 version = "0.11.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd"
 dependencies = [
  "bytes",
  "prost-derive",
 ]
 
 [[package]]
 name = "prost-build"
 version = "0.11.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "119533552c9a7ffacc21e099c24a0ac8bb19c2a2a3f363de84cd9b844feab270"
 dependencies = [
  "bytes",
  "heck",
  "itertools",
  "lazy_static",
  "log",
  "multimap",
  "petgraph",
  "prettyplease 0.1.25",
  "prost",
  "prost-types",
  "regex",
  "syn 1.0.109",
  "tempfile",
  "which",
 ]
 
 [[package]]
 name = "prost-derive"
 version = "0.11.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4"
 dependencies = [
  "anyhow",
  "itertools",
  "proc-macro2",
  "quote",
  "syn 1.0.109",
 ]
 
 [[package]]
 name = "prost-types"
 version = "0.11.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "213622a1460818959ac1181aaeb2dc9c7f63df720db7d788b3e24eacd1983e13"
 dependencies = [
  "prost",
 ]
 
 [[package]]
 name = "pyo3"
 version = "0.20.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9a89dc7a5850d0e983be1ec2a463a171d20990487c3cfcd68b5363f1ee3d6fe0"
 dependencies = [
  "cfg-if",
  "indoc",
  "libc",
  "memoffset",
  "parking_lot",
  "pyo3-build-config",
  "pyo3-ffi",
  "pyo3-macros",
  "unindent",
 ]
 
 [[package]]
 name = "pyo3-build-config"
 version = "0.20.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "07426f0d8fe5a601f26293f300afd1a7b1ed5e78b2a705870c5f30893c5163be"
 dependencies = [
  "once_cell",
  "target-lexicon",
 ]
 
 [[package]]
 name = "pyo3-ffi"
 version = "0.20.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "dbb7dec17e17766b46bca4f1a4215a85006b4c2ecde122076c562dd058da6cf1"
 dependencies = [
  "libc",
  "pyo3-build-config",
 ]
 
 [[package]]
 name = "pyo3-macros"
 version = "0.20.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "05f738b4e40d50b5711957f142878cfa0f28e054aa0ebdfc3fd137a843f74ed3"
 dependencies = [
  "proc-macro2",
  "pyo3-macros-backend",
  "quote",
  "syn 2.0.29",
 ]
 
 [[package]]
 name = "pyo3-macros-backend"
 version = "0.20.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0fc910d4851847827daf9d6cdd4a823fbdaab5b8818325c5e97a86da79e8881f"
 dependencies = [
  "heck",
  "proc-macro2",
  "quote",
  "syn 2.0.29",
 ]
 
 [[package]]
 name = "quote"
 version = "1.0.33"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae"
 dependencies = [
  "proc-macro2",
 ]
 
 [[package]]
 name = "rand"
 version = "0.4.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293"
 dependencies = [
  "fuchsia-cprng",
  "libc",
  "rand_core 0.3.1",
  "rdrand",
  "winapi",
 ]
 
 [[package]]
 name = "rand"
 version = "0.8.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
 dependencies = [
  "libc",
  "rand_chacha",
  "rand_core 0.6.4",
 ]
 
 [[package]]
 name = "rand_chacha"
 version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
 dependencies = [
  "ppv-lite86",
  "rand_core 0.6.4",
 ]
 
 [[package]]
 name = "rand_core"
 version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b"
 dependencies = [
  "rand_core 0.4.2",
 ]
 
 [[package]]
 name = "rand_core"
 version = "0.4.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc"
 
 [[package]]
 name = "rand_core"
 version = "0.6.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c"
 dependencies = [
  "getrandom",
 ]
 
 [[package]]
 name = "rdrand"
 version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2"
 dependencies = [
  "rand_core 0.3.1",
 ]
 
 [[package]]
 name = "redox_syscall"
 version = "0.3.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29"
 dependencies = [
  "bitflags 1.3.2",
 ]
 
 [[package]]
 name = "redox_syscall"
 version = "0.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa"
 dependencies = [
  "bitflags 1.3.2",
 ]
 
 [[package]]
 name = "regex"
 version = "1.9.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "81bc1d4caf89fac26a70747fe603c130093b53c773888797a6329091246d651a"
 dependencies = [
  "aho-corasick",
  "memchr",
  "regex-automata",
  "regex-syntax",
 ]
 
 [[package]]
 name = "regex-automata"
 version = "0.3.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "fed1ceff11a1dddaee50c9dc8e4938bd106e9d89ae372f192311e7da498e3b69"
 dependencies = [
  "aho-corasick",
  "memchr",
  "regex-syntax",
 ]
 
 [[package]]
 name = "regex-syntax"
 version = "0.7.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2"
 
 [[package]]
 name = "remove_dir_all"
 version = "0.5.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7"
 dependencies = [
  "winapi",
 ]
 
 [[package]]
 name = "ripemd"
 version = "0.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "bd124222d17ad93a644ed9d011a40f4fb64aa54275c08cc216524a9ea82fb09f"
 dependencies = [
  "digest",
 ]
 
 [[package]]
 name = "rocksdb"
 version = "0.21.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "bb6f170a4041d50a0ce04b0d2e14916d6ca863ea2e422689a5b694395d299ffe"
 dependencies = [
  "libc",
  "librocksdb-sys",
 ]
 
 [[package]]
 name = "rustc-demangle"
 version = "0.1.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76"
 
 [[package]]
 name = "rustc-hash"
 version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
 
 [[package]]
 name = "rustc_version"
 version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
 dependencies = [
  "semver",
 ]
 
 [[package]]
 name = "rustix"
 version = "0.38.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "19ed4fa021d81c8392ce04db050a3da9a60299050b7ae1cf482d862b54a7218f"
 dependencies = [
  "bitflags 2.4.0",
  "errno",
  "libc",
  "linux-raw-sys",
  "windows-sys",
 ]
 
 [[package]]
 name = "rustversion"
 version = "1.0.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4"
 
 [[package]]
 name = "ryu"
 version = "1.0.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741"
 
 [[package]]
 name = "scopeguard"
 version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
 
 [[package]]
 name = "scratch"
 version = "1.0.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a3cf7c11c38cb994f3d40e8a8cde3bbd1f72a435e4c49e85d6553d8312306152"
 
 [[package]]
 name = "seahash"
 version = "4.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b"
 
 [[package]]
 name = "semver"
 version = "1.0.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b0293b4b29daaf487284529cc2f5675b8e57c61f70167ba415a463651fd6a918"
 
 [[package]]
 name = "serde"
 version = "1.0.185"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "be9b6f69f1dfd54c3b568ffa45c310d6973a5e5148fd40cf515acaf38cf5bc31"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
 version = "1.0.185"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "dc59dfdcbad1437773485e0367fea4b090a2e0a16d9ffc46af47764536a298ec"
 dependencies = [
  "proc-macro2",
  "quote",
  "syn 2.0.29",
 ]
 
 [[package]]
 name = "serde_json"
 version = "1.0.105"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "693151e1ac27563d6dbcec9dee9fbd5da8539b20fa14ad3752b2e6d363ace360"
 dependencies = [
  "itoa",
  "ryu",
  "serde",
 ]
 
 [[package]]
 name = "serde_path_to_error"
 version = "0.1.14"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4beec8bce849d58d06238cb50db2e1c417cfeafa4c63f692b15c82b7c80f8335"
 dependencies = [
  "itoa",
  "serde",
 ]
 
 [[package]]
 name = "serde_urlencoded"
 version = "0.7.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
 dependencies = [
  "form_urlencoded",
  "itoa",
  "ryu",
  "serde",
 ]
 
 [[package]]
 name = "sha1"
 version = "0.10.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3"
 dependencies = [
  "cfg-if",
  "cpufeatures",
  "digest",
 ]
 
 [[package]]
 name = "sha2"
 version = "0.10.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8"
 dependencies = [
  "cfg-if",
  "cpufeatures",
  "digest",
 ]
 
 [[package]]
 name = "shlex"
 version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "43b2853a4d09f215c24cc5489c992ce46052d359b5109343cbafbf26bc62f8a3"
 
 [[package]]
 name = "slab"
 version = "0.4.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67"
 dependencies = [
  "autocfg",
 ]
 
 [[package]]
 name = "smallvec"
 version = "1.11.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970"
 
 [[package]]
 name = "socket2"
 version = "0.4.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662"
 dependencies = [
  "libc",
  "winapi",
 ]
 
 [[package]]
 name = "socket2"
 version = "0.5.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2538b18701741680e0322a2302176d3253a35388e2e62f172f64f4f16605f877"
 dependencies = [
  "libc",
  "windows-sys",
 ]
 
 [[package]]
 name = "spin"
 version = "0.9.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67"
 dependencies = [
  "lock_api",
 ]
 
 [[package]]
 name = "stable-eyre"
 version = "0.2.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "556fec8c2da34c70b75f16d88df8a8cd7e652e567ff097b7e9df0022c8695cc4"
 dependencies = [
  "backtrace",
  "eyre",
  "indenter",
 ]
 
 [[package]]
 name = "stable_deref_trait"
 version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
 
 [[package]]
 name = "syn"
 version = "1.0.109"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237"
 dependencies = [
  "proc-macro2",
  "quote",
  "unicode-ident",
 ]
 
 [[package]]
 name = "syn"
 version = "2.0.29"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c324c494eba9d92503e6f1ef2e6df781e78f6a7705a0202d9801b198807d518a"
 dependencies = [
  "proc-macro2",
  "quote",
  "unicode-ident",
 ]
 
 [[package]]
 name = "sync_wrapper"
 version = "0.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160"
 
 [[package]]
 name = "target-lexicon"
 version = "0.12.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "69758bda2e78f098e4ccb393021a0963bb3442eac05f135c30f61b7370bbafae"
 
 [[package]]
 name = "tempdir"
 version = "0.3.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "15f2b5fb00ccdf689e0149d1b1b3c03fead81c2b37735d812fa8bddbbf41b6d8"
 dependencies = [
  "rand 0.4.6",
  "remove_dir_all",
 ]
 
 [[package]]
 name = "tempfile"
 version = "3.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef"
 dependencies = [
  "cfg-if",
  "fastrand",
  "redox_syscall 0.3.5",
  "rustix",
  "windows-sys",
 ]
 
 [[package]]
 name = "termcolor"
 version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6"
 dependencies = [
  "winapi-util",
 ]
 
 [[package]]
 name = "thiserror"
 version = "1.0.47"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "97a802ec30afc17eee47b2855fc72e0c4cd62be9b4efe6591edde0ec5bd68d8f"
 dependencies = [
  "thiserror-impl",
 ]
 
 [[package]]
 name = "thiserror-impl"
 version = "1.0.47"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6bb623b56e39ab7dcd4b1b98bb6c8f8d907ed255b18de254088016b27a8ee19b"
 dependencies = [
  "proc-macro2",
  "quote",
  "syn 2.0.29",
 ]
 
 [[package]]
 name = "tinyvec"
 version = "1.6.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50"
 dependencies = [
  "tinyvec_macros",
 ]
 
 [[package]]
 name = "tinyvec_macros"
 version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
 
 [[package]]
 name = "tokio"
 version = "1.32.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "17ed6077ed6cd6c74735e21f37eb16dc3935f96878b1fe961074089cc80893f9"
 dependencies = [
  "backtrace",
  "bytes",
  "libc",
  "mio",
  "num_cpus",
  "pin-project-lite",
  "socket2 0.5.3",
  "tokio-macros",
  "windows-sys",
 ]
 
 [[package]]
 name = "tokio-macros"
 version = "2.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e"
 dependencies = [
  "proc-macro2",
  "quote",
  "syn 2.0.29",
 ]
 
 [[package]]
 name = "tokio-tungstenite"
 version = "0.20.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2b2dbec703c26b00d74844519606ef15d09a7d6857860f84ad223dec002ddea2"
 dependencies = [
  "futures-util",
  "log",
  "tokio",
  "tungstenite",
 ]
 
 [[package]]
 name = "topo_sort"
 version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "156552d3c80df430aaac98c605a4e0eb7da8d06029cce2d40b4a6b095a34b37e"
 
 [[package]]
 name = "tower"
 version = "0.4.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c"
 dependencies = [
  "futures-core",
  "futures-util",
  "pin-project",
  "pin-project-lite",
  "tokio",
  "tower-layer",
  "tower-service",
  "tracing",
 ]
 
 [[package]]
 name = "tower-layer"
 version = "0.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0"
 
 [[package]]
 name = "tower-service"
 version = "0.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52"
 
 [[package]]
 name = "tracing"
 version = "0.1.37"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8"
 dependencies = [
  "cfg-if",
  "log",
  "pin-project-lite",
  "tracing-core",
 ]
 
 [[package]]
 name = "tracing-core"
 version = "0.1.31"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a"
 dependencies = [
  "once_cell",
 ]
 
 [[package]]
 name = "try-lock"
 version = "0.2.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed"
 
 [[package]]
 name = "tungstenite"
 version = "0.20.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e862a1c4128df0112ab625f55cd5c934bcb4312ba80b39ae4b4835a3fd58e649"
 dependencies = [
  "byteorder",
  "bytes",
  "data-encoding",
  "http",
  "httparse",
  "log",
  "rand 0.8.5",
  "sha1",
  "thiserror",
  "url",
  "utf-8",
 ]
 
 [[package]]
 name = "typenum"
 version = "1.16.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba"
 
 [[package]]
 name = "unicode-bidi"
 version = "0.3.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460"
 
 [[package]]
 name = "unicode-ident"
 version = "1.0.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c"
 
 [[package]]
 name = "unicode-normalization"
 version = "0.1.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921"
 dependencies = [
  "tinyvec",
 ]
 
 [[package]]
 name = "unicode-width"
 version = "0.1.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b"
 
 [[package]]
 name = "unindent"
 version = "0.2.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c7de7d73e1754487cb58364ee906a499937a0dfabd86bcb980fa99ec8c8fa2ce"
 
 [[package]]
 name = "url"
 version = "2.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb"
 dependencies = [
  "form_urlencoded",
  "idna",
  "percent-encoding",
 ]
 
 [[package]]
 name = "utf-8"
 version = "0.7.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
 
 [[package]]
 name = "vcpkg"
 version = "0.2.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
 
 [[package]]
 name = "version_check"
 version = "0.9.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
 
 [[package]]
 name = "want"
 version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e"
 dependencies = [
  "try-lock",
 ]
 
 [[package]]
 name = "wasi"
 version = "0.11.0+wasi-snapshot-preview1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
 
 [[package]]
 name = "which"
 version = "4.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2441c784c52b289a054b7201fc93253e288f094e2f4be9058343127c4226a269"
 dependencies = [
  "either",
  "libc",
  "once_cell",
 ]
 
 [[package]]
 name = "winapi"
 version = "0.3.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
 dependencies = [
  "winapi-i686-pc-windows-gnu",
  "winapi-x86_64-pc-windows-gnu",
 ]
 
 [[package]]
 name = "winapi-i686-pc-windows-gnu"
 version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
 
 [[package]]
 name = "winapi-util"
 version = "0.1.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
 dependencies = [
  "winapi",
 ]
 
 [[package]]
 name = "winapi-x86_64-pc-windows-gnu"
 version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
 
 [[package]]
 name = "windows-sys"
 version = "0.48.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
 dependencies = [
  "windows-targets",
 ]
 
 [[package]]
 name = "windows-targets"
 version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
 dependencies = [
  "windows_aarch64_gnullvm",
  "windows_aarch64_msvc",
  "windows_i686_gnu",
  "windows_i686_msvc",
  "windows_x86_64_gnu",
  "windows_x86_64_gnullvm",
  "windows_x86_64_msvc",
 ]
 
 [[package]]
 name = "windows_aarch64_gnullvm"
 version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
 
 [[package]]
 name = "windows_aarch64_msvc"
 version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
 
 [[package]]
 name = "windows_i686_gnu"
 version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
 
 [[package]]
 name = "windows_i686_msvc"
 version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
 
 [[package]]
 name = "windows_x86_64_gnu"
 version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
 
 [[package]]
 name = "windows_x86_64_gnullvm"
 version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
 
 [[package]]
 name = "windows_x86_64_msvc"
 version = "0.48.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
 
 [[package]]
 name = "yansi"
 version = "0.5.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"
diff --git a/chronik/chronik-bridge/src/util.rs b/chronik/chronik-bridge/src/util.rs
index ea2e5dca2..0f11a3aaa 100644
--- a/chronik/chronik-bridge/src/util.rs
+++ b/chronik/chronik-bridge/src/util.rs
@@ -1,74 +1,125 @@
 // Copyright (c) 2023 The Bitcoin developers
 // Distributed under the MIT software license, see the accompanying
 // file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 //! Module for bridge utilities
 
 use bitcoinsuite_core::{
     script::Script,
     tx::{Coin, OutPoint, Tx, TxId, TxInput, TxMut, TxOutput},
 };
 
 use crate::ffi;
 
 /// Unwrap the given std::unique_ptr as a C++ reference, panicing if it's null.
 pub fn expect_unique_ptr<'ptr, T: cxx::memory::UniquePtrTarget>(
     name: &str,
     uptr: &'ptr cxx::UniquePtr<T>,
 ) -> &'ptr T {
     uptr.as_ref()
         .unwrap_or_else(|| panic!("{name} returned a null std::unique_ptr"))
 }
 
 impl From<ffi::Tx> for Tx {
     fn from(tx: ffi::Tx) -> Self {
         Tx::with_txid(
             TxId::from(tx.txid),
             TxMut {
                 version: tx.version,
                 inputs: tx.inputs.into_iter().map(TxInput::from).collect(),
                 outputs: tx.outputs.into_iter().map(TxOutput::from).collect(),
                 locktime: tx.locktime,
             },
         )
     }
 }
 
+impl From<TxMut> for ffi::Tx {
+    fn from(tx: TxMut) -> Self {
+        ffi::Tx {
+            txid: TxId::from_tx(&tx).to_bytes(),
+            version: tx.version,
+            inputs: tx.inputs.into_iter().map(ffi::TxInput::from).collect(),
+            outputs: tx.outputs.into_iter().map(ffi::TxOutput::from).collect(),
+            locktime: tx.locktime,
+        }
+    }
+}
+
 impl From<ffi::OutPoint> for OutPoint {
     fn from(value: ffi::OutPoint) -> Self {
         OutPoint {
             txid: TxId::from(value.txid),
             out_idx: value.out_idx,
         }
     }
 }
 
+impl From<OutPoint> for ffi::OutPoint {
+    fn from(value: OutPoint) -> Self {
+        ffi::OutPoint {
+            txid: value.txid.to_bytes(),
+            out_idx: value.out_idx,
+        }
+    }
+}
+
 impl From<ffi::TxInput> for TxInput {
     fn from(input: ffi::TxInput) -> Self {
         TxInput {
             prev_out: OutPoint::from(input.prev_out),
             script: Script::new(input.script.into()),
             sequence: input.sequence,
             coin: Some(Coin::from(input.coin)),
         }
     }
 }
 
+impl From<TxInput> for ffi::TxInput {
+    fn from(input: TxInput) -> Self {
+        ffi::TxInput {
+            prev_out: ffi::OutPoint::from(input.prev_out),
+            script: input.script.bytecode().to_vec(),
+            sequence: input.sequence,
+            coin: input.coin.map_or(ffi::Coin::default(), ffi::Coin::from),
+        }
+    }
+}
+
 impl From<ffi::TxOutput> for TxOutput {
     fn from(output: ffi::TxOutput) -> Self {
         TxOutput {
             value: output.value,
             script: Script::new(output.script.into()),
         }
     }
 }
 
+impl From<TxOutput> for ffi::TxOutput {
+    fn from(output: TxOutput) -> Self {
+        ffi::TxOutput {
+            value: output.value,
+            script: output.script.to_vec(),
+        }
+    }
+}
+
 impl From<ffi::Coin> for Coin {
     fn from(coin: ffi::Coin) -> Self {
         Coin {
             output: TxOutput::from(coin.output),
             height: coin.height,
             is_coinbase: coin.is_coinbase,
         }
     }
 }
+
+impl From<Coin> for ffi::Coin {
+    fn from(coin: Coin) -> Self {
+        ffi::Coin {
+            output: ffi::TxOutput::from(coin.output),
+            height: coin.height,
+            is_coinbase: coin.is_coinbase,
+        }
+    }
+}
diff --git a/chronik/chronik-http/src/server.rs b/chronik/chronik-http/src/server.rs
index d1e569b82..2cdb31bae 100644
--- a/chronik/chronik-http/src/server.rs
+++ b/chronik/chronik-http/src/server.rs
@@ -1,339 +1,391 @@
 // Copyright (c) 2022 The Bitcoin developers
 // Distributed under the MIT software license, see the accompanying
 // file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 //! Module for [`ChronikServer`].
 
 use std::collections::HashMap;
 use std::time::Duration;
 use std::{net::SocketAddr, sync::Arc};
 
 use abc_rust_error::{Result, WrapErr};
 use axum::{
     extract::{Path, Query, WebSocketUpgrade},
     response::IntoResponse,
-    routing, Extension, Router,
+    routing::{self, MethodFilter},
+    Extension, Router,
 };
 use bitcoinsuite_core::tx::TxId;
 use chronik_indexer::{
     indexer::{ChronikIndexer, Node},
     pause::PauseNotify,
 };
 use chronik_proto::proto;
 use hyper::server::conn::AddrIncoming;
 use thiserror::Error;
 use tokio::sync::RwLock;
 
 use crate::{
     error::ReportError, handlers, protobuf::Protobuf,
     ws::handle_subscribe_socket,
 };
 
 /// Ref-counted indexer with read or write access
 pub type ChronikIndexerRef = Arc<RwLock<ChronikIndexer>>;
 /// Ref-counted access to the bitcoind node
 pub type NodeRef = Arc<Node>;
 /// Ref-counted pause notifier for Chronik indexing
 pub type PauseNotifyRef = Arc<PauseNotify>;
 
 /// Settings to tune Chronik
 #[derive(Clone, Debug)]
 pub struct ChronikSettings {
     /// Duration between WebSocket pings initiated by Chronik.
     pub ws_ping_interval: Duration,
 }
 
 /// Params defining what and where to serve for [`ChronikServer`].
 #[derive(Clone, Debug)]
 pub struct ChronikServerParams {
     /// Host address (port + IP) where to serve Chronik at.
     pub hosts: Vec<SocketAddr>,
     /// Indexer to read data from
     pub indexer: ChronikIndexerRef,
     /// Access to the bitcoind node
     pub node: NodeRef,
     /// Handle for pausing/resuming indexing any updates from the node
     pub pause_notify: PauseNotifyRef,
     /// Settings to tune Chronik
     pub settings: ChronikSettings,
 }
 
 /// Chronik HTTP server, holding all the data/handles required to serve an
 /// instance.
 #[derive(Debug)]
 pub struct ChronikServer {
     server_builders: Vec<hyper::server::Builder<AddrIncoming>>,
     indexer: ChronikIndexerRef,
     node: NodeRef,
     pause_notify: PauseNotifyRef,
     settings: ChronikSettings,
 }
 
 /// Errors for [`ChronikServer`].
 #[derive(Debug, Eq, Error, PartialEq)]
 pub enum ChronikServerError {
     /// Binding to host address failed
     #[error("Chronik failed binding to {0}: {1}")]
     FailedBindingAddress(SocketAddr, String),
 
     /// Serving Chronik failed
     #[error("Chronik failed serving: {0}")]
     ServingFailed(String),
 
     /// Query is neither a hex hash nor an integer string
     #[error("400: Not a hash or height: {0}")]
     NotHashOrHeight(String),
 
     /// Query is not a txid
     #[error("400: Not a txid: {0}")]
     NotTxId(String),
 
     /// Block not found in DB
     #[error("404: Block not found: {0}")]
     BlockNotFound(String),
 }
 
 use self::ChronikServerError::*;
 
 impl ChronikServer {
     /// Binds the Chronik server on the given hosts
     pub fn setup(params: ChronikServerParams) -> Result<Self> {
         let server_builders = params
             .hosts
             .into_iter()
             .map(|host| {
                 axum::Server::try_bind(&host).map_err(|err| {
                     FailedBindingAddress(host, err.to_string()).into()
                 })
             })
             .collect::<Result<Vec<_>>>()?;
         Ok(ChronikServer {
             server_builders,
             indexer: params.indexer,
             node: params.node,
             pause_notify: params.pause_notify,
             settings: params.settings,
         })
     }
 
     /// Serve a Chronik HTTP endpoint with the given parameters.
     pub async fn serve(self) -> Result<()> {
         let app = Self::make_router(
             self.indexer,
             self.node,
             self.pause_notify,
             self.settings,
         );
         let servers = self
             .server_builders
             .into_iter()
             .zip(std::iter::repeat(app))
             .map(|(server_builder, app)| {
                 Box::pin(async move {
                     server_builder
                         .serve(app.into_make_service())
                         .await
                         .map_err(|err| ServingFailed(err.to_string()))
                 })
             });
         let (result, _, _) = futures::future::select_all(servers).await;
         result?;
         Ok(())
     }
 
     fn make_router(
         indexer: ChronikIndexerRef,
         node: NodeRef,
         pause_notify: PauseNotifyRef,
         settings: ChronikSettings,
     ) -> Router {
         Router::new()
             .route("/blockchain-info", routing::get(handle_blockchain_info))
             .route("/block/:hash_or_height", routing::get(handle_block))
             .route("/block-txs/:hash_or_height", routing::get(handle_block_txs))
             .route("/blocks/:start/:end", routing::get(handle_block_range))
             .route("/chronik-info", routing::get(handle_chronik_info))
             .route("/tx/:txid", routing::get(handle_tx))
             .route("/token/:txid", routing::get(handle_token_info))
+            .route(
+                "/broadcast-tx",
+                routing::post(handle_broadcast_tx)
+                    .on(MethodFilter::OPTIONS, handle_post_options),
+            )
+            .route(
+                "/broadcast-txs",
+                routing::post(handle_broadcast_txs)
+                    .on(MethodFilter::OPTIONS, handle_post_options),
+            )
             .route("/raw-tx/:txid", routing::get(handle_raw_tx))
             .route(
                 "/script/:type/:payload/confirmed-txs",
                 routing::get(handle_script_confirmed_txs),
             )
             .route(
                 "/script/:type/:payload/history",
                 routing::get(handle_script_history),
             )
             .route(
                 "/script/:type/:payload/unconfirmed-txs",
                 routing::get(handle_script_unconfirmed_txs),
             )
             .route(
                 "/script/:type/:payload/utxos",
                 routing::get(handle_script_utxos),
             )
             .route("/ws", routing::get(handle_ws))
             .route("/pause", routing::get(handle_pause))
             .route("/resume", routing::get(handle_resume))
             .fallback(handlers::handle_not_found)
             .layer(Extension(indexer))
             .layer(Extension(node))
             .layer(Extension(pause_notify))
             .layer(Extension(settings))
     }
 }
 
 async fn handle_blockchain_info(
     Extension(indexer): Extension<ChronikIndexerRef>,
 ) -> Result<Protobuf<proto::BlockchainInfo>, ReportError> {
     let indexer = indexer.read().await;
     let blocks = indexer.blocks();
     Ok(Protobuf(blocks.blockchain_info()?))
 }
 
 async fn handle_chronik_info(
 ) -> Result<Protobuf<proto::ChronikInfo>, ReportError> {
     let this_chronik_version: String = env!("CARGO_PKG_VERSION").to_string();
     let chronik_info = proto::ChronikInfo {
         version: this_chronik_version,
     };
     Ok(Protobuf(chronik_info))
 }
 
 async fn handle_block_range(
     Path((start_height, end_height)): Path<(i32, i32)>,
     Extension(indexer): Extension<ChronikIndexerRef>,
 ) -> Result<Protobuf<proto::Blocks>, ReportError> {
     let indexer = indexer.read().await;
     let blocks = indexer.blocks();
     Ok(Protobuf(blocks.by_range(start_height, end_height)?))
 }
 
 async fn handle_block(
     Path(hash_or_height): Path<String>,
     Extension(indexer): Extension<ChronikIndexerRef>,
 ) -> Result<Protobuf<proto::Block>, ReportError> {
     let indexer = indexer.read().await;
     let blocks = indexer.blocks();
     Ok(Protobuf(blocks.by_hash_or_height(hash_or_height)?))
 }
 
 async fn handle_block_txs(
     Path(hash_or_height): Path<String>,
     Query(query_params): Query<HashMap<String, String>>,
     Extension(indexer): Extension<ChronikIndexerRef>,
 ) -> Result<Protobuf<proto::TxHistoryPage>, ReportError> {
     let indexer = indexer.read().await;
     Ok(Protobuf(
         handlers::handle_block_txs(hash_or_height, &query_params, &indexer)
             .await?,
     ))
 }
 
 async fn handle_tx(
     Path(txid): Path<String>,
     Extension(indexer): Extension<ChronikIndexerRef>,
 ) -> Result<Protobuf<proto::Tx>, ReportError> {
     let indexer = indexer.read().await;
     let txid = txid.parse::<TxId>().wrap_err(NotTxId(txid))?;
     Ok(Protobuf(indexer.txs().tx_by_id(txid)?))
 }
 
 async fn handle_token_info(
     Path(txid): Path<String>,
     Extension(indexer): Extension<ChronikIndexerRef>,
 ) -> Result<Protobuf<proto::TokenInfo>, ReportError> {
     let indexer = indexer.read().await;
     let txid = txid.parse::<TxId>().wrap_err(NotTxId(txid))?;
     Ok(Protobuf(indexer.txs().token_info(&txid)?))
 }
 
+async fn handle_broadcast_tx(
+    Extension(indexer): Extension<ChronikIndexerRef>,
+    Extension(node): Extension<NodeRef>,
+    Protobuf(request): Protobuf<proto::BroadcastTxRequest>,
+) -> Result<Protobuf<proto::BroadcastTxResponse>, ReportError> {
+    let indexer = indexer.read().await;
+    let txids = indexer
+        .broadcast(node.as_ref())
+        .broadcast_txs(&[request.raw_tx.into()], request.skip_token_checks)?;
+    Ok(Protobuf(proto::BroadcastTxResponse {
+        txid: txids[0].to_vec(),
+    }))
+}
+
+async fn handle_broadcast_txs(
+    Extension(indexer): Extension<ChronikIndexerRef>,
+    Extension(node): Extension<NodeRef>,
+    Protobuf(request): Protobuf<proto::BroadcastTxsRequest>,
+) -> Result<Protobuf<proto::BroadcastTxsResponse>, ReportError> {
+    let indexer = indexer.read().await;
+    let txids = indexer.broadcast(node.as_ref()).broadcast_txs(
+        &request
+            .raw_txs
+            .into_iter()
+            .map(Into::into)
+            .collect::<Vec<_>>(),
+        request.skip_token_checks,
+    )?;
+    Ok(Protobuf(proto::BroadcastTxsResponse {
+        txids: txids.into_iter().map(|txid| txid.to_vec()).collect(),
+    }))
+}
+
 async fn handle_raw_tx(
     Path(txid): Path<String>,
     Extension(indexer): Extension<ChronikIndexerRef>,
 ) -> Result<Protobuf<proto::RawTx>, ReportError> {
     let indexer = indexer.read().await;
     let txid = txid.parse::<TxId>().wrap_err(NotTxId(txid))?;
     Ok(Protobuf(indexer.txs().raw_tx_by_id(&txid)?))
 }
 
 async fn handle_script_confirmed_txs(
     Path((script_type, payload)): Path<(String, String)>,
     Query(query_params): Query<HashMap<String, String>>,
     Extension(indexer): Extension<ChronikIndexerRef>,
 ) -> Result<Protobuf<proto::TxHistoryPage>, ReportError> {
     let indexer = indexer.read().await;
     Ok(Protobuf(
         handlers::handle_script_confirmed_txs(
             &script_type,
             &payload,
             &query_params,
             &indexer,
         )
         .await?,
     ))
 }
 
 async fn handle_script_history(
     Path((script_type, payload)): Path<(String, String)>,
     Query(query_params): Query<HashMap<String, String>>,
     Extension(indexer): Extension<ChronikIndexerRef>,
 ) -> Result<Protobuf<proto::TxHistoryPage>, ReportError> {
     let indexer = indexer.read().await;
     Ok(Protobuf(
         handlers::handle_script_history(
             &script_type,
             &payload,
             &query_params,
             &indexer,
         )
         .await?,
     ))
 }
 
 async fn handle_script_unconfirmed_txs(
     Path((script_type, payload)): Path<(String, String)>,
     Extension(indexer): Extension<ChronikIndexerRef>,
 ) -> Result<Protobuf<proto::TxHistoryPage>, ReportError> {
     let indexer = indexer.read().await;
     Ok(Protobuf(
         handlers::handle_script_unconfirmed_txs(
             &script_type,
             &payload,
             &indexer,
         )
         .await?,
     ))
 }
 
 async fn handle_script_utxos(
     Path((script_type, payload)): Path<(String, String)>,
     Extension(indexer): Extension<ChronikIndexerRef>,
 ) -> Result<Protobuf<proto::ScriptUtxos>, ReportError> {
     let indexer = indexer.read().await;
     Ok(Protobuf(
         handlers::handle_script_utxos(&script_type, &payload, &indexer).await?,
     ))
 }
 
 async fn handle_pause(
     Extension(pause_notify): Extension<PauseNotifyRef>,
 ) -> Result<Protobuf<proto::Empty>, ReportError> {
     pause_notify.pause()?;
     Ok(Protobuf(proto::Empty {}))
 }
 
 async fn handle_resume(
     Extension(pause_notify): Extension<PauseNotifyRef>,
 ) -> Result<Protobuf<proto::Empty>, ReportError> {
     pause_notify.resume()?;
     Ok(Protobuf(proto::Empty {}))
 }
 
 async fn handle_ws(
     ws: WebSocketUpgrade,
     Extension(indexer): Extension<ChronikIndexerRef>,
     Extension(settings): Extension<ChronikSettings>,
 ) -> impl IntoResponse {
     ws.on_upgrade(|ws| handle_subscribe_socket(ws, indexer, settings))
 }
+
+async fn handle_post_options(
+) -> Result<axum::http::Response<axum::body::Body>, ReportError> {
+    axum::http::Response::builder()
+        .header("Allow", "OPTIONS, HEAD, POST")
+        .body(axum::body::Body::empty())
+        .map_err(|err| ReportError(err.into()))
+}
diff --git a/chronik/chronik-indexer/Cargo.toml b/chronik/chronik-indexer/Cargo.toml
index 2ca664015..d957e8f59 100644
--- a/chronik/chronik-indexer/Cargo.toml
+++ b/chronik/chronik-indexer/Cargo.toml
@@ -1,45 +1,48 @@
 # Copyright (c) 2022 The Bitcoin developers
 
 [package]
 name = "chronik-indexer"
 version = "0.1.0"
 edition = "2021"
 rust-version.workspace = true
 license = "MIT"
 
 [dependencies]
 abc-rust-lint = { path = "../abc-rust-lint" }
 abc-rust-error = { path = "../abc-rust-error" }
 
 bitcoinsuite-core = { path = "../bitcoinsuite-core" }
 bitcoinsuite-slp = { path = "../bitcoinsuite-slp" }
 
 chronik-bridge = { path = "../chronik-bridge" }
 chronik-db = { path = "../chronik-db" }
 chronik-proto = { path = "../chronik-proto" }
 chronik-util = { path = "../chronik-util" }
 
+# Efficient byte strings, with ref counted substrings
+bytes = "1.4"
+
 # Bridge to C++
 cxx = "1.0"
 
 # Protobuf en-/decoding
 prost = "0.11"
 
 # Derive error enums
 thiserror = "1.0"
 
 # Async runtime
 [dependencies.tokio]
 version = "1.25"
 features = ["sync", "rt", "rt-multi-thread", "macros"]
 
 [dev-dependencies]
 # Colorful diffs for assertions
 pretty_assertions = "1.0"
 
 # Temporary directory that's deleted when dropped
 tempdir = "0.3"
 
 [build-dependencies]
 # Build structs for protobuf messages from a *.proto file
 prost-build = "0.11"
diff --git a/chronik/chronik-indexer/src/indexer.rs b/chronik/chronik-indexer/src/indexer.rs
index a47e1ebf2..9511c16c8 100644
--- a/chronik/chronik-indexer/src/indexer.rs
+++ b/chronik/chronik-indexer/src/indexer.rs
@@ -1,766 +1,779 @@
 // Copyright (c) 2022 The Bitcoin developers
 // Distributed under the MIT software license, see the accompanying
 // file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 //! Module containing [`ChronikIndexer`] to index blocks and txs.
 
 use std::{io::Write, path::PathBuf};
 
 use abc_rust_error::{Result, WrapErr};
 use bitcoinsuite_core::{
     block::BlockHash,
     tx::{Tx, TxId},
 };
 use chronik_bridge::{ffi, util::expect_unique_ptr};
 use chronik_db::{
     db::{Db, WriteBatch},
     groups::{ScriptGroup, ScriptHistoryWriter, ScriptUtxoWriter},
     index_tx::prepare_indexed_txs,
     io::{
         merge, token::TokenWriter, BlockHeight, BlockReader, BlockStatsWriter,
         BlockTxs, BlockWriter, DbBlock, MetadataReader, MetadataWriter,
         SchemaVersion, SpentByWriter, TxEntry, TxWriter,
     },
     mem::{MemData, MemDataConf, Mempool, MempoolTx},
 };
 use chronik_util::{log, log_chronik};
 use thiserror::Error;
 use tokio::sync::RwLock;
 
 use crate::{
     avalanche::Avalanche,
     indexer::ChronikIndexerError::*,
-    query::{QueryBlocks, QueryGroupHistory, QueryGroupUtxos, QueryTxs},
+    query::{
+        QueryBlocks, QueryBroadcast, QueryGroupHistory, QueryGroupUtxos,
+        QueryTxs,
+    },
     subs::{BlockMsg, BlockMsgType, Subs},
     subs_group::TxMsgType,
 };
 
 const CURRENT_INDEXER_VERSION: SchemaVersion = 9;
 
 /// Params for setting up a [`ChronikIndexer`] instance.
 #[derive(Clone)]
 pub struct ChronikIndexerParams {
     /// Folder where the node stores its data, net-dependent.
     pub datadir_net: PathBuf,
     /// Whether to clear the DB before opening the DB, e.g. when reindexing.
     pub wipe_db: bool,
     /// Whether to output Chronik performance statistics into a perf/ folder
     pub enable_perf_stats: bool,
 }
 
 /// Struct for indexing blocks and txs. Maintains db handles and mempool.
 #[derive(Debug)]
 pub struct ChronikIndexer {
     db: Db,
     mem_data: MemData,
     mempool: Mempool,
     script_group: ScriptGroup,
     avalanche: Avalanche,
     subs: RwLock<Subs>,
     perf_path: Option<PathBuf>,
 }
 
 /// Access to the bitcoind node.
 #[derive(Debug)]
 pub struct Node {
     /// FFI bridge to the node.
     pub bridge: cxx::UniquePtr<ffi::ChronikBridge>,
 }
 
 /// Block to be indexed by Chronik.
 #[derive(Clone, Debug, Default, Eq, PartialEq)]
 pub struct ChronikBlock {
     /// Data about the block (w/o txs)
     pub db_block: DbBlock,
     /// Txs in the block, with locations of where they are stored on disk.
     pub block_txs: BlockTxs,
     /// Block size in bytes.
     pub size: u64,
     /// Txs in the block, with inputs/outputs so we can group them.
     pub txs: Vec<Tx>,
 }
 
 /// Errors for [`BlockWriter`] and [`BlockReader`].
 #[derive(Debug, Eq, Error, PartialEq)]
 pub enum ChronikIndexerError {
     /// Failed creating the folder for the indexes
     #[error("Failed creating path {0}")]
     CreateDirFailed(PathBuf),
 
     /// Cannot rewind blocks that bitcoind doesn't have
     #[error(
         "Cannot rewind Chronik, it contains block {0} that the node doesn't \
          have. You may need to use -reindex/-chronikreindex, or delete \
          indexes/chronik and restart"
     )]
     CannotRewindChronik(BlockHash),
 
     /// Lower block doesn't exist but higher block does
     #[error(
         "Inconsistent DB: Block {missing} doesn't exist, but {exists} does"
     )]
     BlocksBelowMissing {
         /// Lower height that is missing
         missing: BlockHeight,
         /// Higher height that exists
         exists: BlockHeight,
     },
 
     /// Corrupted schema version
     #[error(
         "Corrupted schema version in the Chronik database, consider running \
          -reindex/-chronikreindex"
     )]
     CorruptedSchemaVersion,
 
     /// Missing schema version for non-empty database
     #[error(
         "Missing schema version in non-empty Chronik database, consider \
          running -reindex/-chronikreindex"
     )]
     MissingSchemaVersion,
 
     /// This Chronik instance is outdated
     #[error(
         "Chronik outdated: Chronik has version {}, but the database has \
          version {0}. Upgrade your node to the appropriate version.",
         CURRENT_INDEXER_VERSION
     )]
     ChronikOutdated(SchemaVersion),
 
     /// Database is outdated
     #[error(
         "DB outdated: Chronik has version {}, but the database has version \
          {0}. -reindex/-chronikreindex to reindex the database to the new \
          version.",
         CURRENT_INDEXER_VERSION
     )]
     DatabaseOutdated(SchemaVersion),
 }
 
 impl ChronikIndexer {
     /// Setup the indexer with the given parameters, e.g. open the DB etc.
     pub fn setup(params: ChronikIndexerParams) -> Result<Self> {
         let indexes_path = params.datadir_net.join("indexes");
         let perf_path = params.datadir_net.join("perf");
         if !indexes_path.exists() {
             std::fs::create_dir(&indexes_path)
                 .wrap_err_with(|| CreateDirFailed(indexes_path.clone()))?;
         }
         if params.enable_perf_stats && !perf_path.exists() {
             std::fs::create_dir(&perf_path)
                 .wrap_err_with(|| CreateDirFailed(perf_path.clone()))?;
         }
         let db_path = indexes_path.join("chronik");
         if params.wipe_db {
             log!("Wiping Chronik at {}\n", db_path.to_string_lossy());
             Db::destroy(&db_path)?;
         }
         log_chronik!("Opening Chronik at {}\n", db_path.to_string_lossy());
         let db = Db::open(&db_path)?;
         verify_schema_version(&db)?;
         let mempool = Mempool::new(ScriptGroup);
         Ok(ChronikIndexer {
             db,
             mempool,
             mem_data: MemData::new(MemDataConf {}),
             script_group: ScriptGroup,
             avalanche: Avalanche::default(),
             subs: RwLock::new(Subs::new(ScriptGroup)),
             perf_path: params.enable_perf_stats.then_some(perf_path),
         })
     }
 
     /// Resync Chronik index to the node
     pub fn resync_indexer(
         &mut self,
         bridge: &ffi::ChronikBridge,
     ) -> Result<()> {
         let block_reader = BlockReader::new(&self.db)?;
         let indexer_tip = block_reader.tip()?;
         let Ok(node_tip_index) = bridge.get_chain_tip() else {
             if let Some(indexer_tip) = &indexer_tip {
                 return Err(
                     CannotRewindChronik(indexer_tip.hash.clone()).into()
                 );
             }
             return Ok(());
         };
         let node_tip_info = ffi::get_block_info(node_tip_index);
         let node_height = node_tip_info.height;
         let node_tip_hash = BlockHash::from(node_tip_info.hash);
         let start_height = match indexer_tip {
             Some(tip) if tip.hash != node_tip_hash => {
                 let indexer_tip_hash = tip.hash.clone();
                 let indexer_height = tip.height;
                 log!(
                     "Node and Chronik diverged, node is on block \
                      {node_tip_hash} at height {node_height}, and Chronik is \
                      on block {indexer_tip_hash} at height {indexer_height}.\n"
                 );
                 let indexer_tip_index = bridge
                     .lookup_block_index(tip.hash.to_bytes())
                     .map_err(|_| CannotRewindChronik(tip.hash.clone()))?;
                 self.rewind_indexer(bridge, indexer_tip_index, &tip)?
             }
             Some(tip) => tip.height,
             None => {
                 log!(
                     "Chronik database empty, syncing to block {node_tip_hash} \
                      at height {node_height}.\n"
                 );
                 -1
             }
         };
         let tip_height = node_tip_info.height;
         for height in start_height + 1..=tip_height {
             if ffi::shutdown_requested() {
                 log!("Stopped re-sync adding blocks\n");
                 return Ok(());
             }
             let block_index = ffi::get_block_ancestor(node_tip_index, height)?;
             let ffi_block = bridge.load_block(block_index)?;
             let ffi_block = expect_unique_ptr("load_block", &ffi_block);
             let block = self.make_chronik_block(ffi_block, block_index)?;
             let hash = block.db_block.hash.clone();
             self.handle_block_connected(block)?;
             log_chronik!(
                 "Added block {hash}, height {height}/{tip_height} to Chronik\n"
             );
             if height % 100 == 0 {
                 log!(
                     "Synced Chronik up to block {hash} at height \
                      {height}/{tip_height}\n"
                 );
             }
         }
         log!(
             "Chronik completed re-syncing with the node, both are now at \
              block {node_tip_hash} at height {node_height}.\n"
         );
         if let Some(perf_path) = &self.perf_path {
             let mut resync_stats =
                 std::fs::File::create(perf_path.join("resync_stats.txt"))?;
             write!(&mut resync_stats, "{:#.3?}", self.mem_data.stats())?;
         }
         Ok(())
     }
 
     fn rewind_indexer(
         &mut self,
         bridge: &ffi::ChronikBridge,
         indexer_tip_index: &ffi::CBlockIndex,
         indexer_db_tip: &DbBlock,
     ) -> Result<BlockHeight> {
         let indexer_height = indexer_db_tip.height;
         let fork_block_index = bridge
             .find_fork(indexer_tip_index)
             .map_err(|_| CannotRewindChronik(indexer_db_tip.hash.clone()))?;
         let fork_info = ffi::get_block_info(fork_block_index);
         let fork_block_hash = BlockHash::from(fork_info.hash);
         let fork_height = fork_info.height;
         let revert_height = fork_height + 1;
         log!(
             "The last common block is {fork_block_hash} at height \
              {fork_height}.\n"
         );
         log!("Reverting Chronik blocks {revert_height} to {indexer_height}.\n");
         for height in (revert_height..indexer_height).rev() {
             if ffi::shutdown_requested() {
                 log!("Stopped re-sync rewinding blocks\n");
                 // return MAX here so we don't add any blocks
                 return Ok(BlockHeight::MAX);
             }
             let db_block = BlockReader::new(&self.db)?
                 .by_height(height)?
                 .ok_or(BlocksBelowMissing {
                     missing: height,
                     exists: indexer_height,
                 })?;
             let block_index = bridge
                 .lookup_block_index(db_block.hash.to_bytes())
                 .map_err(|_| CannotRewindChronik(db_block.hash))?;
             let ffi_block = bridge.load_block(block_index)?;
             let ffi_block = expect_unique_ptr("load_block", &ffi_block);
             let block = self.make_chronik_block(ffi_block, block_index)?;
             self.handle_block_disconnected(block)?;
         }
         Ok(fork_info.height)
     }
 
     /// Add transaction to the indexer's mempool.
     pub fn handle_tx_added_to_mempool(
         &mut self,
         mempool_tx: MempoolTx,
     ) -> Result<()> {
         self.subs
             .get_mut()
             .handle_tx_event(&mempool_tx.tx, TxMsgType::AddedToMempool);
         self.mempool.insert(&self.db, mempool_tx)?;
         Ok(())
     }
 
     /// Remove tx from the indexer's mempool, e.g. by a conflicting tx, expiry
     /// etc. This is not called when the transaction has been mined (and thus
     /// also removed from the mempool).
     pub fn handle_tx_removed_from_mempool(&mut self, txid: TxId) -> Result<()> {
         let mempool_tx = self.mempool.remove(txid)?;
         self.subs
             .get_mut()
             .handle_tx_event(&mempool_tx.tx, TxMsgType::RemovedFromMempool);
         Ok(())
     }
 
     /// Add the block to the index.
     pub fn handle_block_connected(
         &mut self,
         block: ChronikBlock,
     ) -> Result<()> {
         let height = block.db_block.height;
         let mut batch = WriteBatch::default();
         let block_writer = BlockWriter::new(&self.db)?;
         let tx_writer = TxWriter::new(&self.db)?;
         let block_stats_writer = BlockStatsWriter::new(&self.db)?;
         let script_history_writer =
             ScriptHistoryWriter::new(&self.db, self.script_group.clone())?;
         let script_utxo_writer =
             ScriptUtxoWriter::new(&self.db, self.script_group.clone())?;
         let spent_by_writer = SpentByWriter::new(&self.db)?;
         let token_writer = TokenWriter::new(&self.db)?;
         block_writer.insert(&mut batch, &block.db_block)?;
         let first_tx_num = tx_writer.insert(
             &mut batch,
             &block.block_txs,
             &mut self.mem_data.txs,
         )?;
         let index_txs =
             prepare_indexed_txs(&self.db, first_tx_num, &block.txs)?;
         block_stats_writer
             .insert(&mut batch, height, block.size, &index_txs)?;
         script_history_writer.insert(
             &mut batch,
             &index_txs,
             &(),
             &mut self.mem_data.script_history,
         )?;
         script_utxo_writer.insert(
             &mut batch,
             &index_txs,
             &(),
             &mut self.mem_data.script_utxos,
         )?;
         spent_by_writer.insert(
             &mut batch,
             &index_txs,
             &mut self.mem_data.spent_by,
         )?;
         token_writer.insert(&mut batch, &index_txs)?;
         self.db.write_batch(batch)?;
         for tx in &block.block_txs.txs {
             self.mempool.remove_mined(&tx.txid)?;
         }
         merge::check_for_errors()?;
         let subs = self.subs.get_mut();
         subs.broadcast_block_msg(BlockMsg {
             msg_type: BlockMsgType::Connected,
             hash: block.db_block.hash,
             height: block.db_block.height,
         });
         for tx in &block.txs {
             subs.handle_tx_event(tx, TxMsgType::Confirmed);
         }
         Ok(())
     }
 
     /// Remove the block from the index.
     pub fn handle_block_disconnected(
         &mut self,
         block: ChronikBlock,
     ) -> Result<()> {
         let mut batch = WriteBatch::default();
         let block_writer = BlockWriter::new(&self.db)?;
         let tx_writer = TxWriter::new(&self.db)?;
         let block_stats_writer = BlockStatsWriter::new(&self.db)?;
         let script_history_writer =
             ScriptHistoryWriter::new(&self.db, self.script_group.clone())?;
         let script_utxo_writer =
             ScriptUtxoWriter::new(&self.db, self.script_group.clone())?;
         let spent_by_writer = SpentByWriter::new(&self.db)?;
         let token_writer = TokenWriter::new(&self.db)?;
         block_writer.delete(&mut batch, &block.db_block)?;
         let first_tx_num = tx_writer.delete(
             &mut batch,
             &block.block_txs,
             &mut self.mem_data.txs,
         )?;
         let index_txs =
             prepare_indexed_txs(&self.db, first_tx_num, &block.txs)?;
         block_stats_writer.delete(&mut batch, block.db_block.height);
         script_history_writer.delete(
             &mut batch,
             &index_txs,
             &(),
             &mut self.mem_data.script_history,
         )?;
         script_utxo_writer.delete(
             &mut batch,
             &index_txs,
             &(),
             &mut self.mem_data.script_utxos,
         )?;
         spent_by_writer.delete(
             &mut batch,
             &index_txs,
             &mut self.mem_data.spent_by,
         )?;
         token_writer.delete(&mut batch, &index_txs)?;
         self.avalanche.disconnect_block(block.db_block.height)?;
         self.db.write_batch(batch)?;
         let subs = self.subs.get_mut();
         subs.broadcast_block_msg(BlockMsg {
             msg_type: BlockMsgType::Disconnected,
             hash: block.db_block.hash,
             height: block.db_block.height,
         });
         Ok(())
     }
 
     /// Block finalized with Avalanche.
     pub fn handle_block_finalized(
         &mut self,
         block: ChronikBlock,
     ) -> Result<()> {
         self.avalanche.finalize_block(block.db_block.height)?;
         let subs = self.subs.get_mut();
         subs.broadcast_block_msg(BlockMsg {
             msg_type: BlockMsgType::Finalized,
             hash: block.db_block.hash,
             height: block.db_block.height,
         });
         for tx in &block.txs {
             subs.handle_tx_event(tx, TxMsgType::Finalized);
         }
         Ok(())
     }
 
+    /// Return [`QueryBroadcast`] to broadcast tx to the network.
+    pub fn broadcast<'a>(&'a self, node: &'a Node) -> QueryBroadcast<'a> {
+        QueryBroadcast {
+            db: &self.db,
+            avalanche: &self.avalanche,
+            mempool: &self.mempool,
+            node,
+        }
+    }
+
     /// Return [`QueryBlocks`] to read blocks from the DB.
     ///
     /// Only borrows references; cheap to construct per query.
     pub fn blocks(&self) -> QueryBlocks<'_> {
         QueryBlocks {
             db: &self.db,
             avalanche: &self.avalanche,
             mempool: &self.mempool,
         }
     }
 
     /// Return [`QueryTxs`] to return txs from mempool/DB.
     ///
     /// Only borrows references; cheap to construct per query.
     pub fn txs(&self) -> QueryTxs<'_> {
         QueryTxs {
             db: &self.db,
             avalanche: &self.avalanche,
             mempool: &self.mempool,
         }
     }
 
     /// Return [`QueryGroupHistory`] for scripts to query the tx history of
     /// scripts.
     ///
     /// Currently always returns `Ok`.
     pub fn script_history(&self) -> Result<QueryGroupHistory<'_, ScriptGroup>> {
         Ok(QueryGroupHistory {
             db: &self.db,
             avalanche: &self.avalanche,
             mempool: &self.mempool,
             mempool_history: self.mempool.script_history(),
             group: self.script_group.clone(),
         })
     }
 
     /// Return [`QueryGroupUtxos`] for scripts to query the utxos of scripts.
     ///
     /// Currently always returns `Ok`.
     pub fn script_utxos(&self) -> Result<QueryGroupUtxos<'_, ScriptGroup>> {
         Ok(QueryGroupUtxos {
             db: &self.db,
             avalanche: &self.avalanche,
             mempool: &self.mempool,
             mempool_utxos: self.mempool.script_utxos(),
             group: self.script_group.clone(),
         })
     }
 
     /// Subscribers, behind read/write lock.
     ///
     /// Returns a shared handle; callers lock it for reading or writing.
     pub fn subs(&self) -> &RwLock<Subs> {
         &self.subs
     }
 
     /// Build the ChronikBlock from the CBlockIndex
     ///
     /// Bridges the FFI block into Rust structs: the DB block metadata, the
     /// per-tx entries (with on-disk positions) and the parsed txs.
     pub fn make_chronik_block(
         &self,
         block: &ffi::CBlock,
         bindex: &ffi::CBlockIndex,
     ) -> Result<ChronikBlock> {
         let block = ffi::bridge_block(block, bindex)?;
         let db_block = DbBlock {
             hash: BlockHash::from(block.hash),
             prev_hash: BlockHash::from(block.prev_hash),
             height: block.height,
             n_bits: block.n_bits,
             timestamp: block.timestamp,
             file_num: block.file_num,
             data_pos: block.data_pos,
         };
         let block_txs = BlockTxs {
             block_height: block.height,
             txs: block
                 .txs
                 .iter()
                 .map(|tx| {
                     let txid = TxId::from(tx.tx.txid);
                     TxEntry {
                         txid,
                         data_pos: tx.data_pos,
                         undo_pos: tx.undo_pos,
                         // Keep the mempool's first-seen time if we saw the
                         // tx before it was mined; 0 if unknown to us
                         time_first_seen: match self.mempool.tx(&txid) {
                             Some(tx) => tx.time_first_seen,
                             None => 0,
                         },
                         // undo_pos == 0 is used as the coinbase marker
                         is_coinbase: tx.undo_pos == 0,
                     }
                 })
                 .collect(),
         };
         let txs = block
             .txs
             .into_iter()
             .map(|block_tx| Tx::from(block_tx.tx))
             .collect::<Vec<_>>();
         Ok(ChronikBlock {
             db_block,
             block_txs,
             size: block.size,
             txs,
         })
     }
 }
 
 /// Verify the on-disk schema version matches `CURRENT_INDEXER_VERSION`.
 ///
 /// - Empty DB: stamps the current version and continues.
 /// - Version newer than ours -> `ChronikOutdated`.
 /// - Version older than ours -> `DatabaseOutdated`.
 /// - Non-empty DB without a version entry -> `MissingSchemaVersion`.
 fn verify_schema_version(db: &Db) -> Result<()> {
     let metadata_reader = MetadataReader::new(db)?;
     let metadata_writer = MetadataWriter::new(db)?;
     let is_empty = db.is_db_empty()?;
     match metadata_reader
         .schema_version()
         .wrap_err(CorruptedSchemaVersion)?
     {
         Some(schema_version) => {
             assert!(!is_empty, "Empty DB can't have a schema version");
             if schema_version > CURRENT_INDEXER_VERSION {
                 return Err(ChronikOutdated(schema_version).into());
             }
             if schema_version < CURRENT_INDEXER_VERSION {
                 return Err(DatabaseOutdated(schema_version).into());
             }
         }
         None => {
             if !is_empty {
                 return Err(MissingSchemaVersion.into());
             }
             // Fresh DB: stamp it with the current schema version
             let mut batch = WriteBatch::default();
             metadata_writer
                 .update_schema_version(&mut batch, CURRENT_INDEXER_VERSION)?;
             db.write_batch(batch)?;
         }
     }
     log!("Chronik has version {CURRENT_INDEXER_VERSION}\n");
     Ok(())
 }
 
 impl std::fmt::Debug for ChronikIndexerParams {
     /// Manual impl: the `"fn_compress_script"` entry is printed as a `".."`
     /// placeholder rather than an actual value.
     /// NOTE(review): `enable_perf_stats` (used elsewhere in this file) is
     /// not printed here — confirm whether this impl is out of date.
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         f.debug_struct("ChronikIndexerParams")
             .field("datadir_net", &self.datadir_net)
             .field("wipe_db", &self.wipe_db)
             .field("fn_compress_script", &"..")
             .finish()
     }
 }
 
 #[cfg(test)]
 mod tests {
     use abc_rust_error::Result;
     use bitcoinsuite_core::block::BlockHash;
     use chronik_db::{
         db::{Db, WriteBatch, CF_META},
         io::{BlockReader, BlockTxs, DbBlock, MetadataReader, MetadataWriter},
     };
     use pretty_assertions::assert_eq;
 
     use crate::indexer::{
         ChronikBlock, ChronikIndexer, ChronikIndexerError,
         ChronikIndexerParams, CURRENT_INDEXER_VERSION,
     };
 
     /// Setup / connect / disconnect / wipe roundtrip on a temp-dir DB.
     #[test]
     fn test_indexer() -> Result<()> {
         let tempdir = tempdir::TempDir::new("chronik-indexer--indexer")?;
         let datadir_net = tempdir.path().join("regtest");
         let params = ChronikIndexerParams {
             datadir_net: datadir_net.clone(),
             wipe_db: false,
             enable_perf_stats: false,
         };
         // regtest folder doesn't exist yet -> error
         assert_eq!(
             ChronikIndexer::setup(params.clone())
                 .unwrap_err()
                 .downcast::<ChronikIndexerError>()?,
             ChronikIndexerError::CreateDirFailed(datadir_net.join("indexes")),
         );
 
         // create regtest folder, setup will work now
         std::fs::create_dir(&datadir_net)?;
         let mut indexer = ChronikIndexer::setup(params.clone())?;
         // indexes and indexes/chronik folder now exist
         assert!(datadir_net.join("indexes").exists());
         assert!(datadir_net.join("indexes").join("chronik").exists());
 
         // DB is empty
         assert_eq!(BlockReader::new(&indexer.db)?.by_height(0)?, None);
         let block = ChronikBlock {
             db_block: DbBlock {
                 hash: BlockHash::from([4; 32]),
                 prev_hash: BlockHash::from([0; 32]),
                 height: 0,
                 n_bits: 0x1deadbef,
                 timestamp: 1234567890,
                 file_num: 0,
                 data_pos: 1337,
             },
             block_txs: BlockTxs {
                 block_height: 0,
                 txs: vec![],
             },
             size: 285,
             txs: vec![],
         };
 
         // Add block
         indexer.handle_block_connected(block.clone())?;
         assert_eq!(
             BlockReader::new(&indexer.db)?.by_height(0)?,
             Some(block.db_block.clone())
         );
 
         // Remove block again
         indexer.handle_block_disconnected(block.clone())?;
         assert_eq!(BlockReader::new(&indexer.db)?.by_height(0)?, None);
 
         // Add block then wipe, block not there
         indexer.handle_block_connected(block)?;
         std::mem::drop(indexer);
         let indexer = ChronikIndexer::setup(ChronikIndexerParams {
             wipe_db: true,
             ..params
         })?;
         assert_eq!(BlockReader::new(&indexer.db)?.by_height(0)?, None);
 
         Ok(())
     }
 
     /// Exercises every branch of `verify_schema_version`.
     #[test]
     fn test_schema_version() -> Result<()> {
         let dir = tempdir::TempDir::new("chronik-indexer--schema_version")?;
         let chronik_path = dir.path().join("indexes").join("chronik");
         let params = ChronikIndexerParams {
             datadir_net: dir.path().to_path_buf(),
             wipe_db: false,
             enable_perf_stats: false,
         };
 
         // Setting up DB first time sets the schema version
         ChronikIndexer::setup(params.clone())?;
         {
             let db = Db::open(&chronik_path)?;
             assert_eq!(
                 MetadataReader::new(&db)?.schema_version()?,
                 Some(CURRENT_INDEXER_VERSION)
             );
         }
         // Opening DB again works fine
         ChronikIndexer::setup(params.clone())?;
 
         // Override DB schema version to 0
         {
             let db = Db::open(&chronik_path)?;
             let mut batch = WriteBatch::default();
             MetadataWriter::new(&db)?.update_schema_version(&mut batch, 0)?;
             db.write_batch(batch)?;
         }
         // -> DB too old
         assert_eq!(
             ChronikIndexer::setup(params.clone())
                 .unwrap_err()
                 .downcast::<ChronikIndexerError>()?,
             ChronikIndexerError::DatabaseOutdated(0),
         );
 
         // Override DB schema version to CURRENT_INDEXER_VERSION + 1
         {
             let db = Db::open(&chronik_path)?;
             let mut batch = WriteBatch::default();
             MetadataWriter::new(&db)?.update_schema_version(
                 &mut batch,
                 CURRENT_INDEXER_VERSION + 1,
             )?;
             db.write_batch(batch)?;
         }
         // -> Chronik too old
         assert_eq!(
             ChronikIndexer::setup(params.clone())
                 .unwrap_err()
                 .downcast::<ChronikIndexerError>()?,
             ChronikIndexerError::ChronikOutdated(CURRENT_INDEXER_VERSION + 1),
         );
 
         // Corrupt schema version
         {
             let db = Db::open(&chronik_path)?;
             let cf_meta = db.cf(CF_META)?;
             let mut batch = WriteBatch::default();
             batch.put_cf(cf_meta, b"SCHEMA_VERSION", [0xff]);
             db.write_batch(batch)?;
         }
         assert_eq!(
             ChronikIndexer::setup(params.clone())
                 .unwrap_err()
                 .downcast::<ChronikIndexerError>()?,
             ChronikIndexerError::CorruptedSchemaVersion,
         );
 
         // New db path, but has existing data
         let new_dir = dir.path().join("new");
         let new_chronik_path = new_dir.join("indexes").join("chronik");
         std::fs::create_dir_all(&new_chronik_path)?;
         let new_params = ChronikIndexerParams {
             datadir_net: new_dir,
             wipe_db: false,
             ..params
         };
         {
             // new db with obscure field in meta
             let db = Db::open(&new_chronik_path)?;
             let mut batch = WriteBatch::default();
             batch.put_cf(db.cf(CF_META)?, b"FOO", b"BAR");
             db.write_batch(batch)?;
         }
         // Error: non-empty DB without schema version
         assert_eq!(
             ChronikIndexer::setup(new_params.clone())
                 .unwrap_err()
                 .downcast::<ChronikIndexerError>()?,
             ChronikIndexerError::MissingSchemaVersion,
         );
         // with wipe it works
         ChronikIndexer::setup(ChronikIndexerParams {
             wipe_db: true,
             ..new_params
         })?;
 
         Ok(())
     }
 }
diff --git a/chronik/chronik-indexer/src/query/broadcast.rs b/chronik/chronik-indexer/src/query/broadcast.rs
new file mode 100644
index 000000000..48c4caa2d
--- /dev/null
+++ b/chronik/chronik-indexer/src/query/broadcast.rs
@@ -0,0 +1,132 @@
+// Copyright (c) 2024 The Bitcoin developers
+// Distributed under the MIT software license, see the accompanying
+// file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+use abc_rust_error::Result;
+use bitcoinsuite_core::{
+    error::DataError,
+    ser::BitcoinSer,
+    tx::{Tx, TxId, TxMut},
+};
+use bytes::Bytes;
+use chronik_bridge::ffi;
+use chronik_db::{db::Db, mem::Mempool};
+use thiserror::Error;
+
+use crate::{
+    avalanche::Avalanche,
+    indexer::Node,
+    query::{QueryBroadcastError::*, TxTokenData},
+};
+
+/// Struct for broadcasting txs on the network
+///
+/// Holds only borrowed references; constructed per request via
+/// `ChronikIndexer::broadcast`.
+#[derive(Debug)]
+pub struct QueryBroadcast<'a> {
+    /// Database
+    pub db: &'a Db,
+    /// Avalanche
+    pub avalanche: &'a Avalanche,
+    /// Mempool
+    pub mempool: &'a Mempool,
+    /// Access to bitcoind to actually broadcast txs
+    pub node: &'a Node,
+}
+
+/// Errors indicating something went wrong with broadcasting txs.
+#[derive(Debug, Error, PartialEq)]
+pub enum QueryBroadcastError {
+    /// Failed deserializing the given raw tx.
+    #[error("400: Parsing tx failed {0}")]
+    ParsingFailed(DataError),
+
+    /// Token validation error that prevented us from broadcasting the tx
+    #[error("400: {0}")]
+    TokenError(String),
+
+    /// Node rejected the tx
+    #[error("400: Broadcast failed: {0}")]
+    BroadcastFailed(String),
+}
+
+impl QueryBroadcast<'_> {
+    /// Broadcast all the txs; if one fails token validation we don't broadcast
+    /// any of them.
+    ///
+    /// With `skip_token_checks` set, txs go straight to the node without any
+    /// token validation. If the node rejects a tx, coins cached during the
+    /// token checks are uncached and `BroadcastFailed` is returned; txs
+    /// already broadcast before the failing one stay broadcast.
+    pub fn broadcast_txs(
+        &self,
+        raw_txs: &[Bytes],
+        skip_token_checks: bool,
+    ) -> Result<Vec<TxId>> {
+        let mut coins_to_uncache = vec![];
+        if !skip_token_checks {
+            coins_to_uncache = self.do_token_checks(raw_txs)?;
+        }
+        let mut txids = Vec::with_capacity(raw_txs.len());
+        let default_max_fee_rate = ffi::default_max_raw_tx_fee_rate_per_kb();
+        for raw_tx in raw_txs.iter() {
+            // Max fee for this tx, from the node's default max raw-tx rate
+            let max_fee = ffi::calc_fee(raw_tx.len(), default_max_fee_rate);
+            txids.push(TxId::from(
+                self.node.bridge.broadcast_tx(raw_tx, max_fee).or_else(
+                    |err| -> Result<_> {
+                        self.node.bridge.uncache_coins(&coins_to_uncache)?;
+                        Err(BroadcastFailed(err.to_string()).into())
+                    },
+                )?,
+            ));
+        }
+        Ok(txids)
+    }
+
+    /// Run token checks on all raw txs and collect the coins that
+    /// `lookup_spent_coins` newly added to the node's coin cache, so the
+    /// caller can uncache them on a failed broadcast.
+    /// NOTE(review): on the `TokenError` path below, the collected coins are
+    /// not uncached anywhere — confirm whether that's intended.
+    fn do_token_checks(&self, raw_txs: &[Bytes]) -> Result<Vec<ffi::OutPoint>> {
+        let mut token_errors = Vec::new();
+        let mut coins_to_uncache = Vec::new();
+        for mut raw_tx in raw_txs.iter().cloned() {
+            let tx = TxMut::deser(&mut raw_tx).map_err(ParsingFailed)?;
+            let mut ffi_tx = ffi::Tx::from(tx);
+
+            // Let the node attach the spent coins to the tx inputs
+            let mut tx_not_found = Vec::new();
+            let mut tx_coins_to_uncache = Vec::new();
+            self.node.bridge.lookup_spent_coins(
+                &mut ffi_tx,
+                &mut tx_not_found,
+                &mut tx_coins_to_uncache,
+            )?;
+            coins_to_uncache.extend(tx_coins_to_uncache);
+
+            let tx = Tx::from(ffi_tx);
+            let token =
+                TxTokenData::from_unbroadcast_tx(self.db, self.mempool, &tx)?;
+            let Some(token) = token else {
+                // Not a token tx -> nothing to check
+                continue;
+            };
+            // Collect all failed parsings and burns of this tx
+            let mut burn_msgs = Vec::new();
+            for failed_parsing in &token.tx.failed_parsings {
+                burn_msgs.push(failed_parsing.to_string());
+            }
+            for entry in &token.tx.entries {
+                if !entry.is_normal() {
+                    burn_msgs.push(entry.burn_summary());
+                }
+            }
+            if !burn_msgs.is_empty() {
+                token_errors.push((tx.txid(), burn_msgs));
+            }
+        }
+        // Join per-tx errors into one message; each error ends in '.', with
+        // a space between errors but not after the very last one
+        let mut error_msg = String::new();
+        for (tx_idx, (txid, errors)) in token_errors.iter().enumerate() {
+            error_msg.push_str(&format!("Tx {} failed token checks: ", txid));
+            for (error_idx, error) in errors.iter().enumerate() {
+                error_msg.push_str(error);
+                error_msg.push('.');
+                if tx_idx != token_errors.len() - 1
+                    || error_idx != errors.len() - 1
+                {
+                    error_msg.push(' ');
+                }
+            }
+        }
+        if !token_errors.is_empty() {
+            return Err(TokenError(error_msg).into());
+        }
+        Ok(coins_to_uncache)
+    }
+}
diff --git a/chronik/chronik-indexer/src/query/mod.rs b/chronik/chronik-indexer/src/query/mod.rs
index 900c6453c..386fb8f5c 100644
--- a/chronik/chronik-indexer/src/query/mod.rs
+++ b/chronik/chronik-indexer/src/query/mod.rs
@@ -1,19 +1,21 @@
 // Copyright (c) 2023 The Bitcoin developers
 // Distributed under the MIT software license, see the accompanying
 // file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 //! Module for structs helping to query the indexer.
 
 mod blocks;
+mod broadcast;
 mod group_history;
 mod group_utxos;
 mod tx_token_data;
 mod txs;
 mod util;
 
 pub use self::blocks::*;
+pub use self::broadcast::*;
 pub use self::group_history::*;
 pub use self::group_utxos::*;
 pub use self::tx_token_data::*;
 pub use self::txs::*;
 pub use self::util::*;
diff --git a/chronik/chronik-indexer/src/query/tx_token_data.rs b/chronik/chronik-indexer/src/query/tx_token_data.rs
index 34e186ad1..ddd198d52 100644
--- a/chronik/chronik-indexer/src/query/tx_token_data.rs
+++ b/chronik/chronik-indexer/src/query/tx_token_data.rs
@@ -1,341 +1,409 @@
 // Copyright (c) 2024 The Bitcoin developers
 // Distributed under the MIT software license, see the accompanying
 // file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 //! Module for [`TxTokenData`].
 
 use std::borrow::Cow;
 
 use abc_rust_error::Result;
 use bitcoinsuite_core::{
     hash::Hashed,
-    tx::{Tx, TxId},
+    tx::{OutPoint, Tx, TxId},
 };
 use bitcoinsuite_slp::{
     color::ColoredTx,
     structs::{GenesisInfo, Token, TokenMeta, TxType},
     token_tx::TokenTx,
     token_type::{AlpTokenType, SlpTokenType, TokenType},
     verify::{SpentToken, VerifyContext},
 };
 use chronik_db::{
     db::Db,
     io::{token::TokenReader, BlockHeight, BlockReader, TxNum, TxReader},
-    mem::{Mempool, MempoolTokens},
+    mem::{Mempool, MempoolTokens, MempoolTokensError},
 };
 use chronik_proto::proto;
 use thiserror::Error;
 
 use crate::{avalanche::Avalanche, query::tx_token_data::TxTokenDataError::*};
 
 /// Helper struct to bundle token data coming from the DB or mempool.
 ///
 /// We use [`Cow`]s so we can either reference the mempool directly (`Borrowed`)
 /// or store the loaded result from the DB (`Owned`).
 #[derive(Debug)]
 pub struct TxTokenData<'m> {
     /// Token inputs of the token tx.
     /// One entry per tx input; `None` for non-token inputs.
     pub inputs: Cow<'m, [Option<SpentToken>]>,
     /// Verified token data of the tx.
     pub tx: Cow<'m, TokenTx>,
 }
 
 /// Errors indicating something went wrong with reading token txs.
 // The "400:"/"500:" message prefixes presumably encode the HTTP status for
 // the serving layer — confirm against the HTTP error handling.
 #[derive(Debug, Error, PartialEq)]
 pub enum TxTokenDataError {
     /// Token num not found.
     #[error("500: Inconsistent DB: Token num {0} not found")]
     TokenTxNumDoesntExist(TxNum),
 
+    /// Transaction token inputs couldn't be queried from the DB
+    #[error("400: {0}")]
+    BadTxInputs(MempoolTokensError),
+
+    /// TxInput has no coin.
+    #[error("400: TxInput {0:?} has no coin")]
+    TxInputHasNoCoin(OutPoint),
+
     /// Token data not found in mempool but should be there
     #[error("500: Inconsistent DB: TxData for token {0} not in mempool")]
     TokenTxDataNotInMempool(TxId),
 
     /// Mempool tx not found in mempool but should be there
     #[error("500: Inconsistent DB: MempoolTx for token {0} not in mempool")]
     TokenTxNotInMempool(TxId),
 
     /// Block not found in DB but should be there
     #[error("500: Inconsistent DB: Missing block for height {0}")]
     MissingBlockForHeight(BlockHeight),
 }
 
 impl<'m> TxTokenData<'m> {
     /// Load token data from the mempool
     ///
     /// Returns `None` if the mempool has neither verified token data nor
     /// token inputs for the txid.
     pub fn from_mempool(mempool: &'m MempoolTokens, tx: &Tx) -> Option<Self> {
         let token_tx = mempool.token_tx(tx.txid_ref());
         let token_inputs = mempool.tx_token_inputs(tx.txid_ref());
         if token_tx.is_none() && token_inputs.is_none() {
             return None;
         }
         Some(TxTokenData {
             inputs: token_inputs
                 .map(Cow::Borrowed)
                 .unwrap_or(Cow::Owned(vec![None; tx.inputs.len()])),
             // No verified tx in the mempool: verify an "empty" colored tx
             // so burns of the token inputs are still reported
             tx: token_tx.map(Cow::Borrowed).unwrap_or_else(|| {
                 let context = VerifyContext {
                     genesis_info: None,
                     spent_tokens: token_inputs.as_ref().unwrap(),
                     spent_scripts: None,
                     override_has_mint_vault: Some(false),
                 };
                 Cow::Owned(context.verify(ColoredTx {
                     outputs: vec![None; tx.outputs.len()],
                     ..Default::default()
                 }))
             }),
         })
     }
 
     /// Load token data from the DB of a mined tx
     pub fn from_db(db: &Db, tx_num: TxNum, tx: &Tx) -> Result<Option<Self>> {
         let colored = ColoredTx::color_tx(tx);
 
         let token_reader = TokenReader::new(db)?;
 
         // No DB token data and no token sections in the tx -> not a token tx
         let (spent_tokens, db_tx_data) =
             match token_reader.spent_tokens_and_db_tx(tx_num)? {
                 Some(db_data) => db_data,
                 None if colored.is_none() => return Ok(None),
                 _ => Default::default(),
             };
 
         let context = VerifyContext {
             genesis_info: None,
             spent_tokens: &spent_tokens,
             spent_scripts: None,
             override_has_mint_vault: Some(db_tx_data.has_mint_vault()),
         };
         let verified = context.verify(colored.unwrap_or_default());
         Ok(Some(TxTokenData {
             inputs: Cow::Owned(spent_tokens),
             tx: Cow::Owned(verified),
         }))
     }
 
+    /// Load token data of a tx not in the mempool or DB.
+    /// The inputs of `tx` are expected to have `Coin`s set to validate SLP V2
+    /// Mint Vault MINT txs.
+    pub fn from_unbroadcast_tx(
+        db: &Db,
+        mempool: &'m Mempool,
+        tx: &Tx,
+    ) -> Result<Option<Self>> {
+        let colored = ColoredTx::color_tx(tx);
+
+        let tx_reader = TxReader::new(db)?;
+        let token_reader = TokenReader::new(db)?;
+        // Resolve token inputs from mempool and DB; bad inputs are a 400
+        let spent_tokens = mempool
+            .tokens()
+            .fetch_tx_spent_tokens(tx, db, |txid| mempool.tx(txid).is_some())?
+            .map_err(BadTxInputs)?;
+
+        let colored = colored.unwrap_or_else(|| ColoredTx {
+            outputs: vec![None; tx.outputs.len()],
+            ..Default::default()
+        });
+        let mut spent_scripts = None;
+        let mut genesis_info = None;
+        if let Some(first_section) = colored.sections.first() {
+            if first_section.is_mint_vault_mint() {
+                // Mint Vault MINTs are verified against the spent scripts
+                // and the token's GENESIS info, so load both here
+                let spent_scripts =
+                    spent_scripts.insert(Vec::with_capacity(tx.inputs.len()));
+                for tx_input in &tx.inputs {
+                    let coin = tx_input
+                        .coin
+                        .as_ref()
+                        .ok_or(TxInputHasNoCoin(tx_input.prev_out))?;
+                    spent_scripts.push(coin.output.script.clone());
+                }
+                let genesis_tx_num = tx_reader
+                    .tx_num_by_txid(first_section.meta.token_id.txid())?;
+                if let Some(genesis_tx_num) = genesis_tx_num {
+                    if let Some(db_genesis_info) =
+                        token_reader.genesis_info(genesis_tx_num)?
+                    {
+                        genesis_info = Some(db_genesis_info);
+                    }
+                }
+            }
+        }
+
+        let context = VerifyContext {
+            genesis_info: genesis_info.as_ref(),
+            spent_tokens: &spent_tokens,
+            spent_scripts: spent_scripts.as_deref(),
+            override_has_mint_vault: None,
+        };
+        let verified = context.verify(colored);
+
+        Ok(Some(TxTokenData {
+            inputs: Cow::Owned(spent_tokens),
+            tx: Cow::Owned(verified),
+        }))
+    }
+
     /// Build token data for a tx input
     pub fn input_token_proto(&self, input_idx: usize) -> Option<proto::Token> {
         let spent_token = self.inputs.get(input_idx)?.as_ref()?;
         let token = &spent_token.token;
         // -1 if the input's token doesn't match any entry of this tx
         let entry_idx = self
             .tx
             .entries
             .iter()
             .position(|section| section.meta == token.meta)
             .map(|section| section as i32)
             .unwrap_or(-1);
         Some(proto::Token {
             token_id: token.meta.token_id.to_string(),
             token_type: Some(make_token_type_proto(token.meta.token_type)),
             entry_idx,
             amount: token.variant.amount(),
             is_mint_baton: token.variant.is_mint_baton(),
         })
     }
 
     /// Build token data for a tx output
     pub fn output_token_proto(
         &self,
         output_idx: usize,
     ) -> Option<proto::Token> {
         let token_output = self.tx.outputs.get(output_idx)?.as_ref()?;
         let token = self.tx.token(token_output);
 
         Some(proto::Token {
             token_id: token.meta.token_id.to_string(),
             token_type: Some(make_token_type_proto(token.meta.token_type)),
             entry_idx: token_output.token_idx as _,
             amount: token.variant.amount() as _,
             is_mint_baton: token.variant.is_mint_baton(),
         })
     }
 
     /// Build token entry protobuf data for a token tx
     pub fn entries_proto(&self) -> Vec<proto::TokenEntry> {
         self.tx
             .entries
             .iter()
             .map(|entry| proto::TokenEntry {
                 token_id: entry.meta.token_id.to_string(),
                 token_type: Some(make_token_type_proto(entry.meta.token_type)),
                 tx_type: match entry.tx_type {
                     Some(TxType::GENESIS) => proto::TokenTxType::Genesis,
                     Some(TxType::MINT) => proto::TokenTxType::Mint,
                     Some(TxType::SEND) => proto::TokenTxType::Send,
                     Some(TxType::UNKNOWN) => proto::TokenTxType::Unknown,
                     Some(TxType::BURN) => proto::TokenTxType::Burn,
                     None => proto::TokenTxType::None,
                 } as _,
                 is_invalid: entry.is_invalid,
                 group_token_id: entry
                     .group_token_meta
                     .as_ref()
                     .map_or(String::new(), |meta| meta.token_id.to_string()),
                 burn_summary: if entry.is_normal() {
                     String::new()
                 } else {
                     entry.burn_summary()
                 },
                 failed_colorings: entry
                     .failed_colorings
                     .iter()
                     .map(|failed_coloring| proto::TokenFailedColoring {
                         pushdata_idx: failed_coloring.pushdata_idx as _,
                         error: failed_coloring.error.to_string(),
                     })
                     .collect(),
                 actual_burn_amount: entry.actual_burn_amount.to_string(),
                 intentional_burn: entry
                     .intentional_burn_amount
                     .unwrap_or_default(),
                 burns_mint_batons: entry.burns_mint_batons,
             })
             .collect()
     }
 }
 
 /// Read just the output data of a token tx from the DB
 ///
 /// Returns `Ok(None)` if the DB has no token data for `tx_num`.
 /// Panics if `out_idx` is out of range of the stored token outputs.
 pub fn read_db_token_output(
     db: &Db,
     tx_num: TxNum,
     out_idx: u32,
 ) -> Result<Option<SpentToken>> {
     let token_reader = TokenReader::new(db)?;
     let Some(db_token_tx) = token_reader.token_tx(tx_num)? else {
         return Ok(None);
     };
     // Token metas are resolved by tx_num through the reader
     db_token_tx.spent_token(&db_token_tx.outputs[out_idx as usize], |tx_num| {
         Ok(token_reader
             .token_meta(tx_num)?
             .ok_or(TokenTxNumDoesntExist(tx_num))?)
     })
 }
 
 /// Map a [`TokenType`] (SLP or ALP) to its protobuf representation.
 pub fn make_token_type_proto(token_type: TokenType) -> proto::TokenType {
     let inner = match token_type {
         TokenType::Slp(slp) => {
             // Unknown SLP types carry their raw numeric value
             let num = match slp {
                 SlpTokenType::Fungible => proto::SlpTokenType::Fungible as _,
                 SlpTokenType::MintVault => proto::SlpTokenType::MintVault as _,
                 SlpTokenType::Nft1Group => proto::SlpTokenType::Nft1Group as _,
                 SlpTokenType::Nft1Child => proto::SlpTokenType::Nft1Child as _,
                 SlpTokenType::Unknown(unknown) => unknown as _,
             };
             proto::token_type::TokenType::Slp(num)
         }
         TokenType::Alp(alp) => {
             let num = match alp {
                 AlpTokenType::Standard => proto::AlpTokenType::Standard as _,
                 AlpTokenType::Unknown(unknown) => unknown as _,
             };
             proto::token_type::TokenType::Alp(num)
         }
     };
     proto::TokenType {
         token_type: Some(inner),
     }
 }
 
 /// Build protobuf genesis info
 ///
 /// Optional fields absent in the GENESIS are encoded as empty byte vectors.
 pub fn make_genesis_info_proto(
     genesis_info: &GenesisInfo,
 ) -> proto::GenesisInfo {
     proto::GenesisInfo {
         token_ticker: genesis_info.token_ticker.to_vec(),
         token_name: genesis_info.token_name.to_vec(),
         url: genesis_info.url.to_vec(),
         hash: genesis_info
             .hash
             .as_ref()
             .map_or(vec![], |hash| hash.to_vec()),
         // Scripthash is serialized little-endian
         mint_vault_scripthash: genesis_info
             .mint_vault_scripthash
             .map_or(vec![], |hash| hash.to_le_vec()),
         data: genesis_info
             .data
             .as_ref()
             .map_or(vec![], |data| data.to_vec()),
         auth_pubkey: genesis_info
             .auth_pubkey
             .as_ref()
             .map_or(vec![], |pubkey| pubkey.to_vec()),
         decimals: genesis_info.decimals as _,
     }
 }
 
 /// Build the protobuf token data attached to a UTXO.
 pub fn make_utxo_token_proto(token: &Token) -> proto::Token {
     let meta = &token.meta;
     let variant = &token.variant;
     proto::Token {
         token_id: meta.token_id.to_string(),
         token_type: Some(make_token_type_proto(meta.token_type)),
         // UTXOs carry no token entry, hence -1
         entry_idx: -1,
         amount: variant.amount(),
         is_mint_baton: variant.is_mint_baton(),
     }
 }
 
 /// Info about a token in the DB/Mempool
 #[derive(Debug)]
 pub struct TokenInfo {
     /// Meta of the token
     pub meta: TokenMeta,
     /// Info in the GENESIS tx of the token
     pub genesis_info: GenesisInfo,
     /// Block the token GENESIS has been mined in, if it's been mined already.
     /// `None` while the GENESIS tx is still in the mempool.
     pub block: Option<proto::BlockMetadata>,
     /// First time the GENESIS tx of the token has been seen on the network
     pub time_first_seen: i64,
 }
 
 /// Read token info from the DB or mempool
 ///
 /// Checks the mempool first, then falls back to the DB. Returns `Ok(None)`
 /// if the txid is unknown or is not a token GENESIS tx.
 pub fn read_token_info(
     db: &Db,
     mempool: &Mempool,
     avalanche: &Avalanche,
     token_id_txid: &TxId,
 ) -> Result<Option<TokenInfo>> {
     // Mempool case: GENESIS is still unconfirmed, so no block metadata
     if let Some(genesis_info) = mempool.tokens().genesis_info(token_id_txid) {
         let token_tx = mempool
             .tokens()
             .token_tx(token_id_txid)
             .ok_or(TokenTxDataNotInMempool(*token_id_txid))?;
         let mempool_tx = mempool
             .tx(token_id_txid)
             .ok_or(TokenTxNotInMempool(*token_id_txid))?;
         return Ok(Some(TokenInfo {
             meta: token_tx.entries[0].meta,
             genesis_info: genesis_info.clone(),
             block: None,
             time_first_seen: mempool_tx.time_first_seen,
         }));
     }
     let tx_reader = TxReader::new(db)?;
     let token_reader = TokenReader::new(db)?;
     let block_reader = BlockReader::new(db)?;
     let (tx_num, block_tx) =
         match tx_reader.tx_and_num_by_txid(token_id_txid)? {
             Some(tuple) => tuple,
             None => return Ok(None),
         };
     let block = block_reader
         .by_height(block_tx.block_height)?
         .ok_or(MissingBlockForHeight(block_tx.block_height))?;
     // Tx exists but has no genesis info -> not a token GENESIS tx
     let genesis_info = match token_reader.genesis_info(tx_num)? {
         Some(db_genesis) => db_genesis,
         None => return Ok(None),
     };
     let meta = token_reader
         .token_meta(tx_num)?
         .ok_or(TokenTxNumDoesntExist(tx_num))?;
     Ok(Some(TokenInfo {
         meta,
         genesis_info,
         block: Some(proto::BlockMetadata {
             height: block_tx.block_height,
             hash: block.hash.to_vec(),
             timestamp: block.timestamp,
             is_final: avalanche.is_final_height(block_tx.block_height),
         }),
         time_first_seen: block_tx.entry.time_first_seen,
     }))
 }
diff --git a/chronik/chronik-proto/proto/chronik.proto b/chronik/chronik-proto/proto/chronik.proto
index 708ebe8cf..518eb9eca 100644
--- a/chronik/chronik-proto/proto/chronik.proto
+++ b/chronik/chronik-proto/proto/chronik.proto
@@ -1,437 +1,465 @@
 // Copyright (c) 2023 The Bitcoin developers
 // Distributed under the MIT software license, see the accompanying
 // file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 syntax = "proto3";
 
 package chronik;
 
 // Block on the blockchain
 message Block {
     // Info about the block
     BlockInfo block_info = 1;
 }
 
 // Range of blocks
 message Blocks {
     // Queried blocks
     repeated BlockInfo blocks = 1;
 }
 
 // Info about the state of the blockchain.
 message BlockchainInfo {
     // Hash (little-endian) of the current tip
     bytes tip_hash = 1;
     // Height of the current tip (genesis has height = 0)
     int32 tip_height = 2;
 }
 
 // Info about the chronik software this server is running
 message ChronikInfo {
     // chronik server version from chronik-http/Cargo.toml
     string version = 1;
 }
 
 // Info about a block
 message BlockInfo {
     // Hash (little-endian)
     bytes hash = 1;
     // Hash of the previous block (little-endian)
     bytes prev_hash = 2;
     // Height in the chain
     int32 height = 3;
     // nBits field encoding the target
     uint32 n_bits = 4;
     // Timestamp field of the block
     int64 timestamp = 5;
     // Whether the block has been finalized by Avalanche
     bool is_final = 14;
     // Block size of this block in bytes (including headers etc.)
     uint64 block_size = 6;
     // Number of txs in this block
     uint64 num_txs = 7;
     // Total number of tx inputs in block (including coinbase)
     uint64 num_inputs = 8;
     // Total number of tx outputs in block (including coinbase)
     uint64 num_outputs = 9;
     // Total number of satoshis spent by tx inputs
     int64 sum_input_sats = 10;
     // Block reward for this block
     int64 sum_coinbase_output_sats = 11;
     // Total number of satoshis in non-coinbase tx outputs
     int64 sum_normal_output_sats = 12;
     // Total number of satoshis burned using OP_RETURN
     int64 sum_burned_sats = 13;
 }
 
 // Details about a transaction
 message Tx {
     // TxId (little-endian) of the tx
     bytes txid = 1;
     // nVersion
     int32 version = 2;
     // Inputs of the tx (aka. `vin`)
     repeated TxInput inputs = 3;
     // Outputs of the tx (aka. `vout`)
     repeated TxOutput outputs = 4;
     // nLockTime
     uint32 lock_time = 5;
     // Which block this tx is in, or None, if in the mempool
     BlockMetadata block = 8;
     // Time this tx has first been added to the mempool, or 0 if unknown
     int64 time_first_seen = 9;
     // Serialized size of the tx
     uint32 size = 11;
     // Whether this tx is a coinbase tx
     bool is_coinbase = 12;
     // Tokens involved in this tx
     repeated TokenEntry token_entries = 13;
     // Failed parsing attempts of this tx
     repeated TokenFailedParsing token_failed_parsings = 14;
     // Token status, i.e. whether this tx has any tokens or unintentional token burns
     // or something unexpected, like failed parsings etc.
     TokenStatus token_status = 15;
 }
 
 // UTXO of a script.
 message ScriptUtxo {
     // txid and out_idx of the unspent output.
     OutPoint outpoint = 1;
     // Block height of the UTXO, or -1 if in mempool.
     int32 block_height = 2;
     // Whether the UTXO has been created in a coinbase tx.
     bool is_coinbase = 3;
     // Value of the output, in satoshis.
     int64 value = 5;
     // Whether the UTXO has been finalized by Avalanche.
     bool is_final = 10;
     // Token value attached to this UTXO
     Token token = 11;
 }
 
 // COutPoint, points to a coin being spent by an input.
 message OutPoint {
     // TxId of the tx of the output being spent.
     bytes txid = 1;
     // Index of the output spent within the transaction.
     uint32 out_idx = 2;
 }
 
 // Points to an input spending a coin.
 message SpentBy {
     // TxId of the tx with the input.
     bytes txid = 1;
     // Index in the inputs of the tx.
     uint32 input_idx = 2;
 }
 
 // CTxIn, spends a coin.
 message TxInput {
     // Reference to the coin being spent.
     OutPoint prev_out = 1;
     // scriptSig, script unlocking the coin.
     bytes input_script = 2;
     // scriptPubKey, script of the output locking the coin.
     bytes output_script = 3;
     // value of the output being spent, in satoshis.
     int64 value = 4;
     // nSequence of the input.
     uint32 sequence_no = 5;
     // Token value attached to this input
     Token token = 8;
 }
 
 // CTxOut, creates a new coin.
 message TxOutput {
     // Value of the coin, in satoshis.
     int64 value = 1;
     // scriptPubKey, script locking the output.
     bytes output_script = 2;
     // Which tx and input spent this output, if any.
     SpentBy spent_by = 4;
     // Token value attached to this output
     Token token = 5;
 }
 
 // Data about a block which a Tx is in.
 message BlockMetadata {
     // Height of the block the tx is in.
     int32 height = 1;
     // Hash of the block the tx is in.
     bytes hash = 2;
     // nTime of the block the tx is in.
     int64 timestamp = 3;
     // Whether the block has been finalized by Avalanche.
     bool is_final = 4;
 }
 
 // Status of a token tx
 enum TokenStatus {
     // Tx involves no tokens whatsoever, i.e. neither any burns nor any failed
     // parsing/coloring or any tokens being created / moved.
     TOKEN_STATUS_NON_TOKEN = 0;
     // Tx involves tokens but no unintentional burns or failed parsings/colorings
     TOKEN_STATUS_NORMAL = 1;
     // Tx involves tokens but contains unintentional burns or failed parsings/colorings
     TOKEN_STATUS_NOT_NORMAL = 2;
 }
 
 // ALP token type
 enum AlpTokenType {
     // Standard ALP token type
     ALP_TOKEN_TYPE_STANDARD = 0;
 }
 
 // SLP token type
 enum SlpTokenType {
     // Unknown "0" token type
     SLP_TOKEN_TYPE_NONE = 0;
     // SLP V1 token type
     SLP_TOKEN_TYPE_FUNGIBLE = 1;
     // SLP V2 mint vault token type
     SLP_TOKEN_TYPE_MINT_VAULT = 2;
     // NFT1 group token type
     SLP_TOKEN_TYPE_NFT1_GROUP = 0x81;
     // NFT1 child token type
     SLP_TOKEN_TYPE_NFT1_CHILD = 0x41;
 }
 
 // SLP/ALP token type
 message TokenType {
     // SLP/ALP token type
     oneof token_type {
         // SLP token type. Can have unknown values for unknown token types
         SlpTokenType slp = 1;
         // ALP token type. Can have unknown values for unknown token types
         AlpTokenType alp = 2;
     }
 }
 
 // SLP/ALP tx type
 enum TokenTxType {
     // No tx type, e.g. when input tokens are burned
     NONE = 0;
     // Unknown tx type, i.e. for unknown token types
     UNKNOWN = 1;
     // GENESIS tx
     GENESIS = 2;
     // SEND tx
     SEND = 3;
     // MINT tx
     MINT = 4;
     // BURN tx
     BURN = 5;
 }
 
 // Info about a token
 message TokenInfo {
     // Hex token_id (in big-endian, like usually displayed to users) of the token.
     // This is not `bytes` because SLP and ALP use different endianness, so to avoid this we use hex, which conventionally implies big-endian in a bitcoin context.
     string token_id = 1;
     // Token type of the token
     TokenType token_type = 2;
     // Info found in the token's GENESIS tx
     GenesisInfo genesis_info = 3;
     // Block of the GENESIS tx, if it's mined already
     BlockMetadata block = 4;
     // Time the GENESIS tx has first been seen by the indexer
     int64 time_first_seen = 5;
 }
 
 // Token involved in a transaction
 message TokenEntry {
     // Hex token_id (in big-endian, like usually displayed to users) of the token.
     // This is not `bytes` because SLP and ALP use different endianness, so to avoid
     // this we use hex, which conventionally implies big-endian in a bitcoin context.
     string token_id = 1;
     // Token type of the token
     TokenType token_type = 2;
     // Tx type of the token; NONE if there's no section that introduced it (e.g. in an accidental burn)
     TokenTxType tx_type = 3;
     // For NFT1 Child tokens: group ID
     string group_token_id = 4;
     // Whether the validation rules have been violated for this section
     bool is_invalid = 5;
     // Human-readable error message of why this entry burned tokens
     string burn_summary = 6;
     // Human-readable error messages of why colorings failed
     repeated TokenFailedColoring failed_colorings = 7;
     // Number of actually burned tokens (as decimal integer string, e.g. "2000").
     // This is because burns can exceed the 64-bit range of values and protobuf doesn't have a nice type to encode this.
     string actual_burn_amount = 8;
     // Burn amount the user explicitly opted into
     uint64 intentional_burn = 9;
     // Whether any mint batons have been burned of this token
     bool burns_mint_batons = 10;
 }
 
 // Genesis info found in GENESIS txs of tokens
 message GenesisInfo {
     // token_ticker of the token
     bytes token_ticker = 1;
     // token_name of the token
     bytes token_name = 2;
     // URL of the token
     bytes url = 3;
     // token_document_hash of the token (only on SLP)
     bytes hash = 4;
     // mint_vault_scripthash (only on SLP V2 Mint Vault)
     bytes mint_vault_scripthash = 5;
     // Arbitrary payload data of the token (only on ALP)
     bytes data = 6;
     // auth_pubkey of the token (only on ALP)
     bytes auth_pubkey = 7;
     // decimals of the token, i.e. how many decimal places the token should be displayed with.
     uint32 decimals = 8;
 }
 
 // Token coloring an input or output
 message Token {
     // Hex token_id of the token, see `TokenInfo` for details
     string token_id = 1;
     // Token type of the token
     TokenType token_type = 2;
     // Index into `token_entries` for `Tx`. -1 for UTXOs
     int32 entry_idx = 3;
     // Base token amount of the input/output
     uint64 amount = 4;
     // Whether the token is a mint baton
     bool is_mint_baton = 5;
 }
 
 // A report of a failed parsing attempt of SLP/ALP.
 // This should always indicate something went wrong when building the tx.
 message TokenFailedParsing {
     // For ALP, the index of the pushdata in the OP_RETURN that failed parsing.
     // -1 if the whole OP_RETURN failed, e.g. for SLP or eMPP
     int32 pushdata_idx = 1;
     // The bytes that failed parsing, useful for debugging
     bytes bytes = 2;
     // Human-readable message of what went wrong
     string error = 3;
 }
 
 // A report of a failed coloring attempt of SLP/ALP.
 // This should always indicate something went wrong when building the tx.
 message TokenFailedColoring {
     // For ALP, the index of the pushdata in the OP_RETURN that failed parsing.
     int32 pushdata_idx = 1;
     // Human-readable message of what went wrong
     string error = 3;
 }
 
 // Page with txs
 message TxHistoryPage {
     // Txs of the page
     repeated Tx txs = 1;
     // How many pages there are total
     uint32 num_pages = 2;
     // How many txs there are total
     uint32 num_txs = 3;
 }
 
 // List of UTXOs of a script
 message ScriptUtxos {
     // The serialized script of the UTXOs
     bytes script = 1;
     // UTXOs of the script.
     repeated ScriptUtxo utxos = 2;
 }
 
+// Broadcast a single tx
+message BroadcastTxRequest {
+    // Serialized tx
+    bytes raw_tx = 1;
+    // Whether to skip token checks and broadcast even if tokens are unintentionally burned
+    bool skip_token_checks = 2;
+}
+
+// Response of broadcasting the tx
+message BroadcastTxResponse {
+    // TxId of the broadcast tx
+    bytes txid = 1;
+}
+
+// Broadcast multiple txs. If one of the txs fails token validation, the entire batch will not be broadcast.
+message BroadcastTxsRequest {
+    // Serialized txs.
+    repeated bytes raw_txs = 1;
+    // Whether to skip token checks and broadcast even if tokens are unintentionally burned
+    bool skip_token_checks = 2;
+}
+
+// Response of broadcasting txs
+message BroadcastTxsResponse {
+    // TxIds of the broadcast txs
+    repeated bytes txids = 1;
+}
+
 // Raw serialized tx.
 message RawTx {
     // Bytes of the serialized tx.
     bytes raw_tx = 1;
 }
 
 // Subscription to WebSocket updates.
 message WsSub {
     // Set this to `true` to unsubscribe from the event.
     bool is_unsub = 1;
     // What kind of updates to subscribe to.
     oneof sub_type {
         // Subscription to block updates
         WsSubBlocks blocks = 2;
         // Subscription to a script
         WsSubScript script = 3;
     }
 }
 
 // Subscription to blocks. They will be sent any time a block got connected,
 // disconnected or finalized.
 message WsSubBlocks {}
 
 // Subscription to a script. They will be sent every time a tx spending the
 // given script or sending to the given script has been added to/removed from
 // the mempool, or confirmed in a block.
 message WsSubScript {
     // Script type to subscribe to ("p2pkh", "p2sh", "p2pk", "other").
     string script_type = 1;
     // Payload for the given script type:
     // - 20-byte hash for "p2pkh" and "p2sh"
     // - 33-byte or 65-byte pubkey for "p2pk"
     // - Serialized script for "other"
     bytes payload = 2;
 }
 
 // Message coming from the WebSocket
 message WsMsg {
     // Kind of message
     oneof msg_type {
         // Error, e.g. when a bad message has been sent into the WebSocket.
         Error error = 1;
         // Block got connected, disconnected, finalized, etc.
         MsgBlock block = 2;
         // Tx got added to/removed from the mempool, or confirmed in a block.
         MsgTx tx = 3;
     }
 }
 
 // Block got connected, disconnected, finalized, etc.
 message MsgBlock {
     // What happened to the block
     BlockMsgType msg_type = 1;
     // Hash of the block (little-endian)
     bytes block_hash = 2;
     // Height of the block
     int32 block_height = 3;
 }
 
 // Type of message for the block
 enum BlockMsgType {
     // Block connected to the blockchain
     BLK_CONNECTED = 0;
     // Block disconnected from the blockchain
     BLK_DISCONNECTED = 1;
     // Block has been finalized by Avalanche
     BLK_FINALIZED = 2;
 }
 
 // Tx got added to/removed from mempool, or confirmed in a block, etc.
 message MsgTx {
     // What happened to the tx
     TxMsgType msg_type = 1;
     // Txid of the tx (little-endian)
     bytes txid = 2;
 }
 
 // Type of message for a tx
 enum TxMsgType {
     // Tx added to the mempool
     TX_ADDED_TO_MEMPOOL = 0;
     // Tx removed from the mempool
     TX_REMOVED_FROM_MEMPOOL = 1;
     // Tx confirmed in a block
     TX_CONFIRMED = 2;
     // Tx finalized by Avalanche
     TX_FINALIZED = 3;
 }
 
 // Empty msg without any data
 message Empty {}
 
 // Error message returned from our APIs.
 message Error {
     // 2, as legacy chronik uses this for the message so we're still compatible.
     string msg = 2;
 }
diff --git a/test/functional/chronik_token_alp.py b/test/functional/chronik_token_alp.py
index 7c5a73d9e..05032eb0d 100644
--- a/test/functional/chronik_token_alp.py
+++ b/test/functional/chronik_token_alp.py
@@ -1,667 +1,667 @@
 #!/usr/bin/env python3
 # Copyright (c) 2024 The Bitcoin developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """
 Test Chronik's ALP integration.
 """
 
 from test_framework.address import (
     ADDRESS_ECREG_P2SH_OP_TRUE,
     ADDRESS_ECREG_UNSPENDABLE,
     P2SH_OP_TRUE,
     SCRIPTSIG_OP_TRUE,
 )
 from test_framework.blocktools import (
     create_block,
     create_coinbase,
     make_conform_to_ctor,
 )
 from test_framework.chronik.alp import (
     alp_burn,
     alp_genesis,
     alp_mint,
     alp_opreturn,
     alp_send,
 )
 from test_framework.chronik.token_tx import TokenTx
 from test_framework.messages import COutPoint, CTransaction, CTxIn, CTxOut
 from test_framework.p2p import P2PDataStore
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.txtools import pad_tx
 
 
 class ChronikTokenAlp(BitcoinTestFramework):
     def set_test_params(self):
         self.setup_clean_chain = True
         self.num_nodes = 1
         self.extra_args = [["-chronik"]]
 
     def skip_test_if_missing_module(self):
         self.skip_if_no_chronik()
 
     def run_test(self):
         from test_framework.chronik.client import pb
 
         def alp_token(token_type=None, **kwargs) -> pb.Token:
             return pb.Token(
                 token_type=token_type or pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
                 **kwargs,
             )
 
         node = self.nodes[0]
         chronik = node.get_chronik_client()
 
         peer = node.add_p2p_connection(P2PDataStore())
         mocktime = 1300000000
         node.setmocktime(mocktime)
 
         coinblockhash = self.generatetoaddress(node, 1, ADDRESS_ECREG_P2SH_OP_TRUE)[0]
         coinblock = node.getblock(coinblockhash)
         cointx = coinblock["tx"][0]
 
         block_hashes = self.generatetoaddress(node, 100, ADDRESS_ECREG_UNSPENDABLE)
 
         coinvalue = 5000000000
 
         txs = []
         tx_names = []
 
         # ALP GENESIS tx
         tx = CTransaction()
         tx.vin = [CTxIn(COutPoint(int(cointx, 16), 0), SCRIPTSIG_OP_TRUE)]
         tx.vout = [
             alp_opreturn(
                 alp_genesis(
                     token_ticker=b"TEST",
                     token_name=b"Test Token",
                     url=b"http://example.com",
                     data=b"Token Data",
                     auth_pubkey=b"Token Pubkey",
                     decimals=4,
                     mint_amounts=[10, 20, 30, 0],
                     num_batons=2,
                 )
             ),
             CTxOut(10000, P2SH_OP_TRUE),
             CTxOut(546, P2SH_OP_TRUE),
             CTxOut(546, P2SH_OP_TRUE),
             CTxOut(coinvalue - 100000, P2SH_OP_TRUE),
             CTxOut(5000, P2SH_OP_TRUE),
             CTxOut(546, P2SH_OP_TRUE),
         ]
         tx.rehash()
         genesis = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=tx.hash,
                     token_type=pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
                     tx_type=pb.GENESIS,
                     actual_burn_amount="0",
                 ),
             ],
             inputs=[pb.Token()],
             outputs=[
                 pb.Token(),
                 alp_token(token_id=tx.hash, amount=10),
                 alp_token(token_id=tx.hash, amount=20),
                 alp_token(token_id=tx.hash, amount=30),
                 pb.Token(),
                 alp_token(token_id=tx.hash, is_mint_baton=True),
                 alp_token(token_id=tx.hash, is_mint_baton=True),
             ],
             token_info=pb.TokenInfo(
                 token_id=tx.hash,
                 token_type=pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
                 genesis_info=pb.GenesisInfo(
                     token_ticker=b"TEST",
                     token_name=b"Test Token",
                     url=b"http://example.com",
                     data=b"Token Data",
                     auth_pubkey=b"Token Pubkey",
                     decimals=4,
                 ),
             ),
         )
         txs.append(genesis)
         tx_names.append("genesis")
-        genesis.send(node)
+        genesis.send(chronik)
         genesis.test(chronik)
 
         # ALP MINT tx
         tx = CTransaction()
         tx.vin = [
             CTxIn(
                 COutPoint(int(genesis.txid, 16), 5),
                 SCRIPTSIG_OP_TRUE,
             )
         ]
         tx.vout = [
             alp_opreturn(
                 alp_mint(
                     token_id=genesis.txid,
                     mint_amounts=[5, 0],
                     num_batons=1,
                 ),
             ),
             CTxOut(546, P2SH_OP_TRUE),
             CTxOut(546, P2SH_OP_TRUE),
             CTxOut(546, P2SH_OP_TRUE),
         ]
         mint = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=genesis.txid,
                     token_type=pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
                     tx_type=pb.MINT,
                     actual_burn_amount="0",
                 )
             ],
             inputs=[alp_token(token_id=genesis.txid, is_mint_baton=True)],
             outputs=[
                 pb.Token(),
                 alp_token(token_id=genesis.txid, amount=5),
                 pb.Token(),
                 alp_token(token_id=genesis.txid, is_mint_baton=True),
             ],
         )
         txs.append(mint)
         tx_names.append("mint")
-        mint.send(node)
+        mint.send(chronik)
         mint.test(chronik)
 
         # ALP SEND tx
         tx = CTransaction()
         tx.vin = [
             CTxIn(
                 COutPoint(int(genesis.txid, 16), 1),
                 SCRIPTSIG_OP_TRUE,
             ),
             CTxIn(COutPoint(int(mint.txid, 16), 1), SCRIPTSIG_OP_TRUE),
         ]
         tx.vout = [
             alp_opreturn(
                 alp_send(
                     token_id=genesis.txid,
                     output_amounts=[3, 12],
                 ),
             ),
             CTxOut(5000, P2SH_OP_TRUE),
             CTxOut(546, P2SH_OP_TRUE),
         ]
         send = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=genesis.txid,
                     token_type=pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
                     tx_type=pb.SEND,
                     actual_burn_amount="0",
                 )
             ],
             inputs=[
                 alp_token(token_id=genesis.txid, amount=10),
                 alp_token(token_id=genesis.txid, amount=5),
             ],
             outputs=[
                 pb.Token(),
                 alp_token(token_id=genesis.txid, amount=3),
                 alp_token(token_id=genesis.txid, amount=12),
             ],
         )
         txs.append(send)
         tx_names.append("send")
-        send.send(node)
+        send.send(chronik)
         send.test(chronik)
 
         # Another ALP GENESIS
         tx = CTransaction()
         tx.vin = [
             CTxIn(
                 COutPoint(int(genesis.txid, 16), 4),
                 SCRIPTSIG_OP_TRUE,
             )
         ]
         tx.vout = [
             alp_opreturn(
                 alp_genesis(
                     mint_amounts=[100],
                     num_batons=2,
                 ),
             ),
             CTxOut(5000, P2SH_OP_TRUE),
             CTxOut(5000, P2SH_OP_TRUE),
             CTxOut(5000, P2SH_OP_TRUE),
             CTxOut(coinvalue - 200000, P2SH_OP_TRUE),
         ]
         tx.rehash()
         genesis2 = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=tx.hash,
                     token_type=pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
                     tx_type=pb.GENESIS,
                     actual_burn_amount="0",
                 ),
             ],
             inputs=[pb.Token()],
             outputs=[
                 pb.Token(),
                 alp_token(token_id=tx.hash, amount=100),
                 alp_token(token_id=tx.hash, is_mint_baton=True),
                 alp_token(token_id=tx.hash, is_mint_baton=True),
                 pb.Token(),
             ],
             token_info=pb.TokenInfo(
                 token_id=tx.hash,
                 token_type=pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
                 genesis_info=pb.GenesisInfo(),
             ),
         )
         txs.append(genesis2)
         tx_names.append("genesis2")
-        genesis2.send(node)
+        genesis2.send(chronik)
         genesis2.test(chronik)
 
         # ALP GENESIS + MINT + SEND all in one
         tx = CTransaction()
         tx.vin = [
             CTxIn(COutPoint(int(send.txid, 16), 1), SCRIPTSIG_OP_TRUE),
             CTxIn(
                 COutPoint(int(genesis2.txid, 16), 2),
                 SCRIPTSIG_OP_TRUE,
             ),
         ]
         tx.vout = [
             alp_opreturn(
                 alp_genesis(
                     token_ticker=b"MULTI",
                     mint_amounts=[0xFFFF_FFFF_FFFF, 0],
                     num_batons=1,
                 ),
                 alp_mint(
                     token_id=genesis2.txid,
                     mint_amounts=[0, 5],
                     num_batons=0,
                 ),
                 alp_burn(
                     token_id=genesis.txid,
                     burn_amount=1,
                 ),
                 alp_send(
                     token_id=genesis.txid,
                     output_amounts=[0, 0, 0, 0, 2],
                 ),
             ),
             CTxOut(546, P2SH_OP_TRUE),
             CTxOut(546, P2SH_OP_TRUE),
             CTxOut(546, P2SH_OP_TRUE),
             CTxOut(546, P2SH_OP_TRUE),
             CTxOut(546, P2SH_OP_TRUE),
             CTxOut(546, P2SH_OP_TRUE),
         ]
         tx.rehash()
         multi = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=tx.hash,
                     token_type=pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
                     tx_type=pb.GENESIS,
                     actual_burn_amount="0",
                 ),
                 pb.TokenEntry(
                     token_id=genesis2.txid,
                     token_type=pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
                     tx_type=pb.MINT,
                     actual_burn_amount="0",
                 ),
                 pb.TokenEntry(
                     token_id=genesis.txid,
                     token_type=pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
                     tx_type=pb.SEND,
                     intentional_burn=1,
                     actual_burn_amount="1",
                 ),
             ],
             inputs=[
                 alp_token(token_id=genesis.txid, amount=3, entry_idx=2),
                 alp_token(token_id=genesis2.txid, is_mint_baton=True, entry_idx=1),
             ],
             outputs=[
                 pb.Token(),
                 alp_token(token_id=tx.hash, amount=0xFFFF_FFFF_FFFF),
                 alp_token(token_id=genesis2.txid, amount=5, entry_idx=1),
                 alp_token(token_id=tx.hash, is_mint_baton=True),
                 pb.Token(),
                 alp_token(token_id=genesis.txid, amount=2, entry_idx=2),
                 pb.Token(),
             ],
             token_info=pb.TokenInfo(
                 token_id=tx.hash,
                 token_type=pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
                 genesis_info=pb.GenesisInfo(token_ticker=b"MULTI"),
             ),
         )
         txs.append(multi)
         tx_names.append("multi")
-        multi.send(node)
+        multi.send(chronik)
         multi.test(chronik)
 
         # ALP tx with all kinds of things (so big it must be mined in a block manually)
         tx = CTransaction()
         tx.vin = [
             CTxIn(
                 COutPoint(int(genesis2.txid, 16), 3),
                 SCRIPTSIG_OP_TRUE,
             ),
             CTxIn(
                 COutPoint(int(genesis.txid, 16), 6),
                 SCRIPTSIG_OP_TRUE,
             ),
             CTxIn(COutPoint(int(multi.txid, 16), 1), SCRIPTSIG_OP_TRUE),
         ]
         tx.vout = [
             alp_opreturn(
                 # 0: success GENESIS
                 alp_genesis(
                     token_ticker=b"ALL",
                     mint_amounts=[0, 7, 0, 0, 1],
                     num_batons=2,
                 ),
                 # 1: fail GENESIS: must be first
                 alp_genesis(mint_amounts=[], num_batons=0),
                 # 2: fail MINT: Too few outputs
                 alp_mint(genesis.txid, [0, 0, 0, 0, 0, 0, 0], 99),
                 # 3: fail MINT: Overlapping amounts
                 alp_mint(genesis.txid, [0, 0xFFFF_FFFF_FFFF], 0),
                 # 4: fail MINT: Overlapping batons
                 alp_mint(genesis.txid, [0], 1),
                 # 5: success BURN: token ID 2
                 alp_burn(genesis.txid, 2),
                 # 6: success MINT: token ID 3
                 alp_mint(genesis2.txid, [3, 0], 1),
                 # 7: success MINT: token ID 2
                 alp_mint(genesis.txid, [0, 0, 0, 2, 0, 0, 0], 1),
                 # 8: fail MINT: Duplicate token ID 2
                 alp_mint(genesis.txid, [], 0),
                 # 9: fail BURN: Duplicate burn token ID 2
                 alp_burn(genesis.txid, 0),
                 # 10: fail SEND: Too few outputs
                 alp_send(multi.txid, [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 123]),
                 # 11: success SEND: token ID 4
                 alp_send(
                     multi.txid,
                     [0, 0, 0, 0, 0, 0, 0, 0, 0, 0xFFFF_FFFF_FFFF],
                 ),
                 # 12: fail MINT: Duplicate token ID 4
                 alp_mint(multi.txid, [], 0),
                 # 13: success UNKNOWN
                 b"SLP2\x89",
                 # 14: fail BURN: Descending token type
                 alp_burn(multi.txid, 0),
                 # 15: success UNKNOWN
                 b"SLP2\x9a",
             ),
             CTxOut(546, P2SH_OP_TRUE),
             CTxOut(546, P2SH_OP_TRUE),
             CTxOut(546, P2SH_OP_TRUE),
             CTxOut(1000, P2SH_OP_TRUE),
             CTxOut(546, P2SH_OP_TRUE),
             CTxOut(546, P2SH_OP_TRUE),
             CTxOut(546, P2SH_OP_TRUE),
             CTxOut(546, P2SH_OP_TRUE),
             CTxOut(546, P2SH_OP_TRUE),
             CTxOut(546, P2SH_OP_TRUE),
         ]
         tx.rehash()
         all_things = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NOT_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=tx.hash,
                     token_type=pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
                     tx_type=pb.GENESIS,
                     actual_burn_amount="0",
                     burn_summary="Invalid coloring at pushdata idx 1: GENESIS must be the first pushdata",
                     failed_colorings=[
                         pb.TokenFailedColoring(
                             pushdata_idx=1,
                             error="GENESIS must be the first pushdata",
                         )
                     ],
                 ),
                 pb.TokenEntry(
                     token_id=genesis2.txid,
                     token_type=pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
                     tx_type=pb.MINT,
                     actual_burn_amount="0",
                 ),
                 pb.TokenEntry(
                     token_id=genesis.txid,
                     token_type=pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
                     tx_type=pb.MINT,
                     intentional_burn=2,
                     actual_burn_amount="0",
                     burn_summary=f"""\
 Invalid coloring at pushdata idx 2: Too few outputs, expected 107 but got 11. Invalid \
 coloring at pushdata idx 3: Overlapping amount when trying to color 281474976710655 at \
 index 2, output is already colored with 7 of {tx.hash} (ALP STANDARD (V0)). Invalid \
 coloring at pushdata idx 4: Overlapping mint baton when trying to color mint baton at \
 index 2, output is already colored with 7 of {tx.hash} (ALP STANDARD (V0)). Invalid \
 coloring at pushdata idx 8: Duplicate token_id {genesis.txid}, found in section 2. \
 Invalid coloring at pushdata idx 9: Duplicate intentional burn token_id \
 {genesis.txid}, found in burn #0 and #1""",
                     failed_colorings=[
                         pb.TokenFailedColoring(
                             pushdata_idx=2,
                             error="Too few outputs, expected 107 but got 11",
                         ),
                         pb.TokenFailedColoring(
                             pushdata_idx=3,
                             error=f"""\
 Overlapping amount when trying to color 281474976710655 at index 2, output is already \
 colored with 7 of {tx.hash} (ALP STANDARD (V0))""",
                         ),
                         pb.TokenFailedColoring(
                             pushdata_idx=4,
                             error=f"""\
 Overlapping mint baton when trying to color mint baton at index 2, output is already \
 colored with 7 of {tx.hash} (ALP STANDARD (V0))""",
                         ),
                         pb.TokenFailedColoring(
                             pushdata_idx=8,
                             error=f"Duplicate token_id {genesis.txid}, found in section 2",
                         ),
                         pb.TokenFailedColoring(
                             pushdata_idx=9,
                             error=f"Duplicate intentional burn token_id {genesis.txid}, found in burn #0 and #1",
                         ),
                     ],
                 ),
                 pb.TokenEntry(
                     token_id=multi.txid,
                     token_type=pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
                     tx_type=pb.SEND,
                     actual_burn_amount="0",
                     burn_summary=f"""\
 Invalid coloring at pushdata idx 10: Too few outputs, expected 13 but got 11. Invalid \
 coloring at pushdata idx 12: Duplicate token_id {multi.txid}, found in section 3. \
 Invalid coloring at pushdata idx 14: Descending token type: 137 > 0, token types must \
 be in ascending order""",
                     failed_colorings=[
                         pb.TokenFailedColoring(
                             pushdata_idx=10,
                             error="Too few outputs, expected 13 but got 11",
                         ),
                         pb.TokenFailedColoring(
                             pushdata_idx=12,
                             error=f"Duplicate token_id {multi.txid}, found in section 3",
                         ),
                         pb.TokenFailedColoring(
                             pushdata_idx=14,
                             error="Descending token type: 137 > 0, token types must be in ascending order",
                         ),
                     ],
                 ),
                 pb.TokenEntry(
                     token_id="00" * 32,
                     token_type=pb.TokenType(alp=0x89),
                     tx_type=pb.UNKNOWN,
                     actual_burn_amount="0",
                 ),
                 pb.TokenEntry(
                     token_id="00" * 32,
                     token_type=pb.TokenType(alp=0x9A),
                     tx_type=pb.UNKNOWN,
                     actual_burn_amount="0",
                 ),
             ],
             inputs=[
                 alp_token(token_id=genesis2.txid, is_mint_baton=True, entry_idx=1),
                 alp_token(token_id=genesis.txid, is_mint_baton=True, entry_idx=2),
                 alp_token(token_id=multi.txid, amount=0xFFFF_FFFF_FFFF, entry_idx=3),
             ],
             outputs=[
                 pb.Token(),
                 # success MINT: token ID 3
                 alp_token(token_id=genesis2.txid, amount=3, entry_idx=1),
                 # success GENESIS
                 alp_token(token_id=tx.hash, amount=7),
                 # success MINT: token ID 3
                 alp_token(token_id=genesis2.txid, is_mint_baton=True, entry_idx=1),
                 # success MINT: token ID 2
                 alp_token(token_id=genesis.txid, amount=2, entry_idx=2),
                 # success GENESIS
                 alp_token(token_id=tx.hash, amount=1),
                 # success GENESIS
                 alp_token(token_id=tx.hash, is_mint_baton=True),
                 # success GENESIS
                 alp_token(token_id=tx.hash, is_mint_baton=True),
                 # success MINT: token ID 2
                 alp_token(token_id=genesis.txid, is_mint_baton=True, entry_idx=2),
                 # success UNKNOWN
                 alp_token(
                     token_id="00" * 32, token_type=pb.TokenType(alp=0x89), entry_idx=4
                 ),
                 # success SEND: token ID 4
                 alp_token(
                     token_id=multi.txid,
                     amount=0xFFFF_FFFF_FFFF,
                     entry_idx=3,
                 ),
             ],
             token_info=pb.TokenInfo(
                 token_id=tx.hash,
                 token_type=pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
                 genesis_info=pb.GenesisInfo(token_ticker=b"ALL"),
             ),
         )
         block_height = 102
         block = create_block(
             int(block_hashes[-1], 16),
             create_coinbase(block_height, b"\x03" * 33),
             1300000500,
         )
         block.vtx += [
             genesis.tx,
             send.tx,
             mint.tx,
             genesis2.tx,
             multi.tx,
             all_things.tx,
         ]
         make_conform_to_ctor(block)
         block.hashMerkleRoot = block.calc_merkle_root()
         block.solve()
         peer.send_blocks_and_test([block], node)
         all_things.test(chronik, block.hash)
 
         # After being mined, all previous txs still work fine:
         for tx in txs:
             tx.test(chronik, block.hash)
 
         # Undo block + test again
         node.invalidateblock(block.hash)
         for tx in txs:
             tx.test(chronik)
 
         # "all_things" not in the mempool (violates policy)
         chronik.tx(all_things.txid).err(404)
 
         # Mining txs one-by-one works
         block_height = 102
         prev_hash = block_hashes[-1]
         tx_block_hashes = [None] * len(txs)
         for block_idx, mined_tx in enumerate(txs):
             block = create_block(
                 int(prev_hash, 16),
                 create_coinbase(block_height + block_idx, b"\x03" * 33),
                 1300000500 + block_idx,
             )
             block.vtx += [mined_tx.tx]
             block.hashMerkleRoot = block.calc_merkle_root()
             block.solve()
             prev_hash = block.hash
             peer.send_blocks_and_test([block], node)
             tx_block_hashes[block_idx] = block.hash
 
             # All txs still work on every block
             for tx, block_hash in zip(txs, tx_block_hashes):
                 tx.test(chronik, block_hash)
 
         # Also mine all_things and test again
         block = create_block(
             int(prev_hash, 16),
             create_coinbase(block_height + len(txs), b"\x03" * 33),
             1300000500 + len(txs),
         )
         block.vtx += [all_things.tx]
         block.hashMerkleRoot = block.calc_merkle_root()
         block.solve()
         peer.send_blocks_and_test([block], node)
         all_things.test(chronik, block.hash)
         for tx, block_hash in zip(txs, tx_block_hashes):
             tx.test(chronik, block_hash)
 
         # Undo that block again + test
         node.invalidateblock(block.hash)
         for tx, block_hash in zip(txs, tx_block_hashes):
             tx.test(chronik, block_hash)
 
         # Invalidating all blocks one-by-one works
         for block_idx in reversed(range(len(txs))):
             node.invalidateblock(tx_block_hashes[block_idx])
             tx_block_hashes[block_idx] = None
             # All txs still work on every invalidation
             for tx, block_hash in zip(txs, tx_block_hashes):
                 tx.test(chronik, block_hash)
 
         # Kicking out all txs from the mempool by mining 1 conflict
         conflict_tx = CTransaction()
         conflict_tx.vin = [CTxIn(COutPoint(int(cointx, 16), 0), SCRIPTSIG_OP_TRUE)]
         pad_tx(conflict_tx)
         block = create_block(
             int(block_hashes[-1], 16),
             create_coinbase(block_height, b"\x03" * 33),
             1300000500,
         )
         block.vtx += [conflict_tx]
         block.hashMerkleRoot = block.calc_merkle_root()
         block.solve()
         peer.send_blocks_and_test([block], node)
         for tx in txs:
             chronik.tx(tx.txid).err(404)
 
 
 if __name__ == "__main__":
     ChronikTokenAlp().main()
diff --git a/test/functional/chronik_token_broadcast_txs.py b/test/functional/chronik_token_broadcast_txs.py
new file mode 100644
index 000000000..cf2ce45e8
--- /dev/null
+++ b/test/functional/chronik_token_broadcast_txs.py
@@ -0,0 +1,216 @@
+#!/usr/bin/env python3
+# Copyright (c) 2024 The Bitcoin developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""
+Test Chronik broadcasts a batch of txs correctly.
+"""
+
+from test_framework.address import (
+    ADDRESS_ECREG_P2SH_OP_TRUE,
+    ADDRESS_ECREG_UNSPENDABLE,
+    P2SH_OP_TRUE,
+    SCRIPTSIG_OP_TRUE,
+)
+from test_framework.chronik.alp import alp_genesis, alp_opreturn, alp_send
+from test_framework.chronik.token_tx import TokenTx
+from test_framework.messages import COutPoint, CTransaction, CTxIn, CTxOut
+from test_framework.script import CScript
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.util import assert_equal
+
+
+class ChronikTokenBroadcastTxs(BitcoinTestFramework):
+    def set_test_params(self):
+        self.setup_clean_chain = True
+        self.num_nodes = 1
+        self.extra_args = [["-chronik"]]
+
+    def skip_test_if_missing_module(self):
+        self.skip_if_no_chronik()
+
+    def run_test(self):
+        from test_framework.chronik.client import pb
+
+        def alp_token(token_type=None, **kwargs) -> pb.Token:
+            return pb.Token(
+                token_type=token_type or pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
+                **kwargs,
+            )
+
+        node = self.nodes[0]
+        chronik = node.get_chronik_client()
+
+        coinblockhash = self.generatetoaddress(node, 1, ADDRESS_ECREG_P2SH_OP_TRUE)[0]
+        coinblock = node.getblock(coinblockhash)
+        cointx = coinblock["tx"][0]
+
+        self.generatetoaddress(node, 100, ADDRESS_ECREG_UNSPENDABLE)
+
+        coinvalue = 5000000000
+
+        txs = []
+
+        tx = CTransaction()
+        tx.vin = [CTxIn(COutPoint(int(cointx, 16), 0), SCRIPTSIG_OP_TRUE)]
+        tx.vout = [
+            alp_opreturn(
+                alp_genesis(
+                    mint_amounts=[1000, 2000, 3000, 4000, 5000, 6000],
+                    num_batons=0,
+                ),
+            ),
+            CTxOut(10000, P2SH_OP_TRUE),
+            CTxOut(10000, P2SH_OP_TRUE),
+            CTxOut(10000, P2SH_OP_TRUE),
+            CTxOut(10000, P2SH_OP_TRUE),
+            CTxOut(10000, P2SH_OP_TRUE),
+            CTxOut(10000, P2SH_OP_TRUE),
+            CTxOut(coinvalue - 100000, P2SH_OP_TRUE),
+        ]
+        tx.rehash()
+        genesis = TokenTx(
+            tx=tx,
+            status=pb.TOKEN_STATUS_NORMAL,
+            entries=[
+                pb.TokenEntry(
+                    token_id=tx.hash,
+                    token_type=pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
+                    tx_type=pb.GENESIS,
+                    actual_burn_amount="0",
+                ),
+            ],
+            inputs=[pb.Token()],
+            outputs=[
+                pb.Token(),
+                alp_token(token_id=tx.hash, amount=1000),
+                alp_token(token_id=tx.hash, amount=2000),
+                alp_token(token_id=tx.hash, amount=3000),
+                alp_token(token_id=tx.hash, amount=4000),
+                alp_token(token_id=tx.hash, amount=5000),
+                alp_token(token_id=tx.hash, amount=6000),
+                pb.Token(),
+            ],
+            token_info=pb.TokenInfo(
+                token_id=tx.hash,
+                token_type=pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
+                genesis_info=pb.GenesisInfo(),
+            ),
+        )
+        txs.append(genesis)
+        genesis.send(chronik)
+        genesis.test(chronik)
+
+        ok_tx = CTransaction()
+        ok_tx.vin = [CTxIn(COutPoint(int(genesis.txid, 16), 1), SCRIPTSIG_OP_TRUE)]
+        ok_tx.vout = [
+            alp_opreturn(alp_send(genesis.txid, [1000])),
+            CTxOut(546, P2SH_OP_TRUE),
+        ]
+        ok_tx.rehash()
+
+        burn_tx = CTransaction()
+        burn_tx.vin = [CTxIn(COutPoint(int(genesis.txid, 16), 2), SCRIPTSIG_OP_TRUE)]
+        burn_tx.vout = [
+            alp_opreturn(alp_send(genesis.txid, [1999])),
+            CTxOut(546, P2SH_OP_TRUE),
+        ]
+        burn_tx.rehash()
+
+        burn2_tx = CTransaction()
+        burn2_tx.vin = [CTxIn(COutPoint(int(genesis.txid, 16), 3), SCRIPTSIG_OP_TRUE)]
+        burn2_tx.vout = [
+            alp_opreturn(alp_send(genesis.txid, [3001])),
+            CTxOut(546, P2SH_OP_TRUE),
+        ]
+        burn2_tx.rehash()
+
+        wrong_sig_tx = CTransaction()
+        wrong_sig_tx.vin = [CTxIn(COutPoint(int(genesis.txid, 16), 4), CScript())]
+        wrong_sig_tx.vout = [
+            alp_opreturn(alp_send(genesis.txid, [4000])),
+            CTxOut(546, P2SH_OP_TRUE),
+        ]
+        wrong_sig_tx.rehash()
+
+        ok2_tx = CTransaction()
+        ok2_tx.vin = [CTxIn(COutPoint(int(genesis.txid, 16), 5), SCRIPTSIG_OP_TRUE)]
+        ok2_tx.vout = [
+            alp_opreturn(alp_send(genesis.txid, [5000])),
+            CTxOut(546, P2SH_OP_TRUE),
+        ]
+        ok2_tx.rehash()
+
+        ok3_tx = CTransaction()
+        ok3_tx.vin = [CTxIn(COutPoint(int(genesis.txid, 16), 6), SCRIPTSIG_OP_TRUE)]
+        ok3_tx.vout = [
+            alp_opreturn(alp_send(genesis.txid, [6000])),
+            CTxOut(546, P2SH_OP_TRUE),
+        ]
+        ok3_tx.rehash()
+
+        error = chronik.broadcast_txs(
+            [
+                ok_tx.serialize(),
+                burn_tx.serialize(),
+                burn2_tx.serialize(),
+                wrong_sig_tx.serialize(),
+            ]
+        ).err(400)
+
+        assert_equal(
+            error.msg,
+            f"""\
+400: Tx {burn_tx.hash} failed token checks: Unexpected burn: Burns 1 base tokens. Tx \
+{burn2_tx.hash} failed token checks: Unexpected burn: Burns 3000 base tokens. \
+Reason(s): Insufficient token input output sum: 3000 < 3001.""",
+        )
+
+        # Token checks succeed but invalid sig -> broadcasts ok_tx anyway
+        error = chronik.broadcast_txs(
+            [
+                ok_tx.serialize(),
+                wrong_sig_tx.serialize(),
+                ok2_tx.serialize(),
+            ]
+        ).err(400)
+        assert_equal(
+            error.msg,
+            """\
+400: Broadcast failed: Transaction rejected by mempool: \
+mandatory-script-verify-flag-failed (Operation not valid with the current stack size)\
+""",
+        )
+        chronik.tx(ok_tx.hash).ok()
+        chronik.tx(ok2_tx.hash).err(404)
+
+        # Broadcast multiple txs successfully
+        txids = (
+            chronik.broadcast_txs(
+                [ok2_tx.serialize(), ok3_tx.serialize()],
+            )
+            .ok()
+            .txids
+        )
+        assert_equal(
+            txids,
+            [bytes.fromhex(ok2_tx.hash)[::-1], bytes.fromhex(ok3_tx.hash)[::-1]],
+        )
+
+        # Skip token checks, broadcast burns without complaining
+        txids = (
+            chronik.broadcast_txs(
+                [burn_tx.serialize(), burn2_tx.serialize()],
+                skip_token_checks=True,
+            )
+            .ok()
+            .txids
+        )
+        assert_equal(
+            txids,
+            [bytes.fromhex(burn_tx.hash)[::-1], bytes.fromhex(burn2_tx.hash)[::-1]],
+        )
+
+
+if __name__ == "__main__":
+    ChronikTokenBroadcastTxs().main()
diff --git a/test/functional/chronik_token_burn.py b/test/functional/chronik_token_burn.py
index 5157fdffb..cc2a0b11d 100644
--- a/test/functional/chronik_token_burn.py
+++ b/test/functional/chronik_token_burn.py
@@ -1,264 +1,270 @@
 #!/usr/bin/env python3
 # Copyright (c) 2024 The Bitcoin developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """
 Test Chronik indexes token burns correctly.
 """
 
 from test_framework.address import (
     ADDRESS_ECREG_P2SH_OP_TRUE,
     ADDRESS_ECREG_UNSPENDABLE,
     P2SH_OP_TRUE,
     SCRIPTSIG_OP_TRUE,
 )
 from test_framework.chronik.alp import alp_burn, alp_genesis, alp_opreturn, alp_send
 from test_framework.chronik.slp import slp_burn, slp_genesis
 from test_framework.chronik.token_tx import TokenTx
 from test_framework.messages import COutPoint, CTransaction, CTxIn, CTxOut
 from test_framework.test_framework import BitcoinTestFramework
 
 
 class ChronikTokenBurn(BitcoinTestFramework):
     def set_test_params(self):
         self.setup_clean_chain = True
         self.num_nodes = 1
         self.extra_args = [["-chronik"]]
 
     def skip_test_if_missing_module(self):
         self.skip_if_no_chronik()
 
     def run_test(self):
         from test_framework.chronik.client import pb
 
         def slp_token(token_type=None, **kwargs) -> pb.Token:
             return pb.Token(
                 token_type=token_type or pb.TokenType(slp=pb.SLP_TOKEN_TYPE_FUNGIBLE),
                 **kwargs,
             )
 
         def alp_token(token_type=None, **kwargs) -> pb.Token:
             return pb.Token(
                 token_type=token_type or pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
                 **kwargs,
             )
 
         node = self.nodes[0]
         chronik = node.get_chronik_client()
 
         mocktime = 1300000000
         node.setmocktime(mocktime)
 
         coinblockhash = self.generatetoaddress(node, 1, ADDRESS_ECREG_P2SH_OP_TRUE)[0]
         coinblock = node.getblock(coinblockhash)
         cointx = coinblock["tx"][0]
 
         self.generatetoaddress(node, 100, ADDRESS_ECREG_UNSPENDABLE)
 
         coinvalue = 5000000000
 
         txs = []
 
         tx = CTransaction()
         tx.vin = [CTxIn(COutPoint(int(cointx, 16), 0), SCRIPTSIG_OP_TRUE)]
         tx.vout = [
             CTxOut(
                 0,
                 slp_genesis(
                     token_type=pb.SLP_TOKEN_TYPE_FUNGIBLE,
                     mint_baton_vout=2,
                     initial_mint_amount=5000,
                 ),
             ),
             CTxOut(10000, P2SH_OP_TRUE),
             CTxOut(10000, P2SH_OP_TRUE),
             CTxOut(coinvalue - 100000, P2SH_OP_TRUE),
         ]
         tx.rehash()
         genesis_slp = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=tx.hash,
                     token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_FUNGIBLE),
                     tx_type=pb.GENESIS,
                     actual_burn_amount="0",
                 ),
             ],
             inputs=[pb.Token()],
             outputs=[
                 pb.Token(),
                 slp_token(token_id=tx.hash, amount=5000),
                 slp_token(token_id=tx.hash, is_mint_baton=True),
                 pb.Token(),
             ],
             token_info=pb.TokenInfo(
                 token_id=tx.hash,
                 token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_FUNGIBLE),
                 genesis_info=pb.GenesisInfo(),
             ),
         )
         txs.append(genesis_slp)
-        genesis_slp.send(node)
+        genesis_slp.send(chronik)
         genesis_slp.test(chronik)
 
         tx = CTransaction()
         tx.vin = [CTxIn(COutPoint(int(genesis_slp.txid, 16), 1), SCRIPTSIG_OP_TRUE)]
         tx.vout = [
             CTxOut(
                 0,
                 slp_burn(
                     token_type=pb.SLP_TOKEN_TYPE_FUNGIBLE,
                     token_id=genesis_slp.txid,
                     amount=5000,
                 ),
             ),
         ]
         burn_slp = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=genesis_slp.txid,
                     token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_FUNGIBLE),
                     tx_type=pb.BURN,
                     actual_burn_amount="5000",
                     intentional_burn=5000,
                 ),
             ],
             inputs=[slp_token(token_id=genesis_slp.txid, amount=5000)],
             outputs=[pb.Token()],
         )
         txs.append(burn_slp)
-        burn_slp.send(node)
+        burn_slp.send(chronik)
         burn_slp.test(chronik)
 
         tx = CTransaction()
         tx.vin = [CTxIn(COutPoint(int(genesis_slp.txid, 16), 3), SCRIPTSIG_OP_TRUE)]
         tx.vout = [
             alp_opreturn(
                 alp_genesis(
                     mint_amounts=[1000],
                     num_batons=1,
                 ),
             ),
             CTxOut(10000, P2SH_OP_TRUE),
             CTxOut(10000, P2SH_OP_TRUE),
             CTxOut(coinvalue - 200000, P2SH_OP_TRUE),
         ]
         tx.rehash()
         genesis_alp = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=tx.hash,
                     token_type=pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
                     tx_type=pb.GENESIS,
                     actual_burn_amount="0",
                 ),
             ],
             inputs=[pb.Token()],
             outputs=[
                 pb.Token(),
                 alp_token(token_id=tx.hash, amount=1000),
                 alp_token(token_id=tx.hash, is_mint_baton=True),
                 pb.Token(),
             ],
             token_info=pb.TokenInfo(
                 token_id=tx.hash,
                 token_type=pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
                 genesis_info=pb.GenesisInfo(),
             ),
         )
         txs.append(genesis_alp)
-        genesis_alp.send(node)
+        genesis_alp.send(chronik)
         genesis_alp.test(chronik)
 
         tx = CTransaction()
         tx.vin = [CTxIn(COutPoint(int(genesis_alp.txid, 16), 1), SCRIPTSIG_OP_TRUE)]
         tx.vout = [
             alp_opreturn(
                 alp_send(genesis_alp.txid, [400]),
                 alp_burn(genesis_alp.txid, 500),
             ),
             CTxOut(546, P2SH_OP_TRUE),
         ]
         burn_alp = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NOT_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=genesis_alp.txid,
                     token_type=pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
                     tx_type=pb.SEND,
                     burn_summary="Unexpected burn: Burns 600 base tokens, but intended to burn 500; burned 100 too many",
                     actual_burn_amount="600",
                     intentional_burn=500,
                 ),
             ],
             inputs=[alp_token(token_id=genesis_alp.txid, amount=1000)],
             outputs=[
                 pb.Token(),
                 alp_token(token_id=genesis_alp.txid, amount=400),
             ],
         )
         txs.append(burn_alp)
-        burn_alp.send(node)
+        burn_alp.send(
+            chronik,
+            error=f"400: Tx {burn_alp.txid} failed token checks: Unexpected burn: Burns 600 base tokens, but intended to burn 500; burned 100 too many.",
+        )
         burn_alp.test(chronik)
 
         # Burns SLP mint baton + ALP tokens without any OP_RETURN
         tx = CTransaction()
         tx.vin = [
             CTxIn(COutPoint(int(genesis_slp.txid, 16), 2), SCRIPTSIG_OP_TRUE),
             CTxIn(COutPoint(int(burn_alp.txid, 16), 1), SCRIPTSIG_OP_TRUE),
         ]
         tx.vout = [
             CTxOut(546, P2SH_OP_TRUE),
         ]
         bare_burn = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NOT_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=genesis_slp.txid,
                     token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_FUNGIBLE),
                     is_invalid=True,
                     burn_summary="Unexpected burn: Burns mint baton(s)",
                     actual_burn_amount="0",
                     burns_mint_batons=True,
                 ),
                 pb.TokenEntry(
                     token_id=genesis_alp.txid,
                     token_type=pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
                     is_invalid=True,
                     burn_summary="Unexpected burn: Burns 400 base tokens",
                     actual_burn_amount="400",
                 ),
             ],
             inputs=[
                 slp_token(token_id=genesis_slp.txid, is_mint_baton=True),
                 alp_token(token_id=genesis_alp.txid, amount=400, entry_idx=1),
             ],
             outputs=[
                 pb.Token(),
             ],
         )
         txs.append(bare_burn)
-        bare_burn.send(node)
+        bare_burn.send(
+            chronik,
+            error=f"400: Tx {bare_burn.txid} failed token checks: Unexpected burn: Burns mint baton(s). Unexpected burn: Burns 400 base tokens.",
+        )
         bare_burn.test(chronik)
 
         # After mining, all txs still work fine
         block_hash = self.generatetoaddress(node, 1, ADDRESS_ECREG_UNSPENDABLE)[0]
         for tx in txs:
             tx.test(chronik, block_hash)
 
         # Undo block + test again
         node.invalidateblock(block_hash)
         for tx in txs:
             tx.test(chronik)
 
 
 if __name__ == "__main__":
     ChronikTokenBurn().main()
diff --git a/test/functional/chronik_token_parse_failure.py b/test/functional/chronik_token_parse_failure.py
index 89e79005a..057dfd419 100644
--- a/test/functional/chronik_token_parse_failure.py
+++ b/test/functional/chronik_token_parse_failure.py
@@ -1,120 +1,130 @@
 #!/usr/bin/env python3
 # Copyright (c) 2024 The Bitcoin developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """
 Test Chronik indexes parse failures correctly.
 """
 
 from test_framework.address import (
     ADDRESS_ECREG_P2SH_OP_TRUE,
     ADDRESS_ECREG_UNSPENDABLE,
     P2SH_OP_TRUE,
     SCRIPTSIG_OP_TRUE,
 )
 from test_framework.chronik.token_tx import TokenTx
 from test_framework.messages import COutPoint, CTransaction, CTxIn, CTxOut
 from test_framework.script import OP_RESERVED, OP_RETURN, CScript
 from test_framework.test_framework import BitcoinTestFramework
 
 
 class ChronikTokenParseFailure(BitcoinTestFramework):
     def set_test_params(self):
         self.setup_clean_chain = True
         self.num_nodes = 1
         self.extra_args = [["-chronik"]]
 
     def skip_test_if_missing_module(self):
         self.skip_if_no_chronik()
 
     def run_test(self):
         from test_framework.chronik.client import pb
 
         node = self.nodes[0]
         chronik = node.get_chronik_client()
 
         mocktime = 1300000000
         node.setmocktime(mocktime)
 
         coinblockhash = self.generatetoaddress(node, 1, ADDRESS_ECREG_P2SH_OP_TRUE)[0]
         coinblock = node.getblock(coinblockhash)
         cointx = coinblock["tx"][0]
 
         self.generatetoaddress(node, 100, ADDRESS_ECREG_UNSPENDABLE)
 
         coinvalue = 5000000000
 
         txs = []
 
         tx = CTransaction()
         tx.vin = [CTxIn(COutPoint(int(cointx, 16), 0), SCRIPTSIG_OP_TRUE)]
         invalid_slp_script = CScript([OP_RETURN, b"SLP\0", b"\x01", b"GENESIS", b""])
         tx.vout = [
             CTxOut(0, invalid_slp_script),
             CTxOut(coinvalue - 100000, P2SH_OP_TRUE),
         ]
         tx.rehash()
         invalid_slp = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NOT_NORMAL,
             entries=[],
             inputs=[pb.Token()],
             outputs=[pb.Token(), pb.Token()],
             failed_parsings=[
                 pb.TokenFailedParsing(
                     pushdata_idx=-1,
                     bytes=bytes(invalid_slp_script),
                     error="SLP error: Disallowed push: OP_0 at op 4",
                 )
             ],
         )
         txs.append(invalid_slp)
-        invalid_slp.send(node)
+        invalid_slp.send(
+            chronik,
+            error=f"400: Tx {invalid_slp.txid} failed token checks: Parsing failed: SLP error: Disallowed push: OP_0 at op 4.",
+        )
         invalid_slp.test(chronik)
 
         tx = CTransaction()
         tx.vin = [CTxIn(COutPoint(int(invalid_slp.txid, 16), 1), SCRIPTSIG_OP_TRUE)]
         invalid_alp_script = CScript(
             [OP_RETURN, OP_RESERVED, b"SLP2\0\x07GENESIS", b"OK", b"SLP\0"]
         )
         tx.vout = [
             CTxOut(0, invalid_alp_script),
             CTxOut(coinvalue - 200000, P2SH_OP_TRUE),
         ]
         tx.rehash()
         invalid_alp = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NOT_NORMAL,
             entries=[],
             inputs=[pb.Token()],
             outputs=[pb.Token(), pb.Token()],
             failed_parsings=[
                 pb.TokenFailedParsing(
                     pushdata_idx=0,
                     bytes=b"SLP2\0\x07GENESIS",
                     error="ALP error: Not enough bytes: expected 1 more bytes but got 0 for field token_ticker",
                 ),
                 pb.TokenFailedParsing(
                     pushdata_idx=2,
                     bytes=b"SLP\0",
                     error='ALP error: Invalid LOKAD ID "SLP\\0", did you accidentally use eMPP?',
                 ),
             ],
         )
         txs.append(invalid_alp)
-        invalid_alp.send(node)
+        invalid_alp.send(
+            chronik,
+            error=f"""\
+400: Tx {invalid_alp.txid} failed token checks: Parsing failed at pushdata idx 0: ALP \
+error: Not enough bytes: expected 1 more bytes but got 0 for field token_ticker. \
+Parsing failed at pushdata idx 2: ALP error: Invalid LOKAD ID \"SLP\\0\", did you \
+accidentally use eMPP?.""",
+        )
         invalid_alp.test(chronik)
 
         # After mining, all txs still work fine
         block_hash = self.generatetoaddress(node, 1, ADDRESS_ECREG_UNSPENDABLE)[0]
         for tx in txs:
             tx.test(chronik, block_hash)
 
         # Undo block + test again
         node.invalidateblock(block_hash)
         for tx in txs:
             tx.test(chronik)
 
 
 if __name__ == "__main__":
     ChronikTokenParseFailure().main()
diff --git a/test/functional/chronik_token_script_group.py b/test/functional/chronik_token_script_group.py
index b316d15bb..fc715027d 100644
--- a/test/functional/chronik_token_script_group.py
+++ b/test/functional/chronik_token_script_group.py
@@ -1,331 +1,331 @@
 #!/usr/bin/env python3
 # Copyright (c) 2024 The Bitcoin developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """
 Test Chronik's /script endpoint works well with tokens.
 """
 
 from itertools import zip_longest
 
 from test_framework.address import (
     ADDRESS_ECREG_P2SH_OP_TRUE,
     ADDRESS_ECREG_UNSPENDABLE,
     P2SH_OP_TRUE,
     SCRIPTSIG_OP_TRUE,
 )
 from test_framework.chronik.alp import alp_genesis, alp_opreturn
 from test_framework.chronik.slp import slp_genesis, slp_send
 from test_framework.chronik.token_tx import TokenTx
 from test_framework.messages import COutPoint, CTransaction, CTxIn, CTxOut
 from test_framework.script import OP_EQUAL, OP_HASH160, CScript
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.util import assert_equal
 
 
 class ChronikTokenScriptGroup(BitcoinTestFramework):
     def set_test_params(self):
         self.setup_clean_chain = True
         self.num_nodes = 1
         self.extra_args = [["-chronik"]]
 
     def skip_test_if_missing_module(self):
         self.skip_if_no_chronik()
 
     def run_test(self):
         from test_framework.chronik.client import pb
 
         def slp_token(token_type=None, **kwargs) -> pb.Token:
             return pb.Token(
                 token_type=token_type or pb.TokenType(slp=pb.SLP_TOKEN_TYPE_FUNGIBLE),
                 **kwargs,
             )
 
         def alp_token(token_type=None, **kwargs) -> pb.Token:
             return pb.Token(
                 token_type=token_type or pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
                 **kwargs,
             )
 
         node = self.nodes[0]
         chronik = node.get_chronik_client()
 
         mocktime = 1300000000
         node.setmocktime(mocktime)
 
         coinblockhash = self.generatetoaddress(node, 1, ADDRESS_ECREG_P2SH_OP_TRUE)[0]
         coinblock = node.getblock(coinblockhash)
         cointx = coinblock["tx"][0]
 
         self.generatetoaddress(node, 100, ADDRESS_ECREG_UNSPENDABLE)
 
         coinvalue = 5000000000
 
         txs = []
 
         tx = CTransaction()
         tx.vin = [CTxIn(COutPoint(int(cointx, 16), 0), SCRIPTSIG_OP_TRUE)]
         tx.vout = [
             CTxOut(
                 0,
                 slp_genesis(
                     token_type=pb.SLP_TOKEN_TYPE_FUNGIBLE,
                     mint_baton_vout=2,
                     initial_mint_amount=5000,
                 ),
             ),
             CTxOut(10000, P2SH_OP_TRUE),
             CTxOut(546, CScript([OP_HASH160, b"\x01" * 20, OP_EQUAL])),
             CTxOut(coinvalue - 100000, P2SH_OP_TRUE),
         ]
         tx.rehash()
         genesis_slp = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=tx.hash,
                     token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_FUNGIBLE),
                     tx_type=pb.GENESIS,
                     actual_burn_amount="0",
                 ),
             ],
             inputs=[pb.Token()],
             outputs=[
                 pb.Token(),
                 slp_token(token_id=tx.hash, amount=5000),
                 slp_token(token_id=tx.hash, is_mint_baton=True),
                 pb.Token(),
             ],
             token_info=pb.TokenInfo(
                 token_id=tx.hash,
                 token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_FUNGIBLE),
                 genesis_info=pb.GenesisInfo(),
             ),
         )
         txs.append(genesis_slp)
-        genesis_slp.send(node)
+        genesis_slp.send(chronik)
         genesis_slp.test(chronik)
 
         tx = CTransaction()
         tx.vin = [CTxIn(COutPoint(int(genesis_slp.txid, 16), 1), SCRIPTSIG_OP_TRUE)]
         tx.vout = [
             CTxOut(
                 0,
                 slp_send(
                     token_type=pb.SLP_TOKEN_TYPE_FUNGIBLE,
                     token_id=genesis_slp.txid,
                     amounts=[1000, 2000, 1500, 500],
                 ),
             ),
             CTxOut(546, CScript([OP_HASH160, b"\x01" * 20, OP_EQUAL])),
             CTxOut(546, CScript([OP_HASH160, b"\x01" * 20, OP_EQUAL])),
             CTxOut(546, CScript([OP_HASH160, b"\x02" * 20, OP_EQUAL])),
             CTxOut(546, CScript([OP_HASH160, b"\x02" * 20, OP_EQUAL])),
         ]
         send_slp = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=genesis_slp.txid,
                     token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_FUNGIBLE),
                     tx_type=pb.SEND,
                     actual_burn_amount="0",
                 ),
             ],
             inputs=[slp_token(token_id=genesis_slp.txid, amount=5000)],
             outputs=[
                 pb.Token(),
                 slp_token(token_id=genesis_slp.txid, amount=1000),
                 slp_token(token_id=genesis_slp.txid, amount=2000),
                 slp_token(token_id=genesis_slp.txid, amount=1500),
                 slp_token(token_id=genesis_slp.txid, amount=500),
             ],
         )
         txs.append(send_slp)
-        send_slp.send(node)
+        send_slp.send(chronik)
         send_slp.test(chronik)
 
         tx = CTransaction()
         tx.vin = [CTxIn(COutPoint(int(genesis_slp.txid, 16), 3), SCRIPTSIG_OP_TRUE)]
         tx.vout = [
             alp_opreturn(
                 alp_genesis(
                     mint_amounts=[10, 20],
                     num_batons=1,
                 ),
             ),
             CTxOut(546, CScript([OP_HASH160, b"\x01" * 20, OP_EQUAL])),
             CTxOut(546, CScript([OP_HASH160, b"\x01" * 20, OP_EQUAL])),
             CTxOut(546, CScript([OP_HASH160, b"\x01" * 20, OP_EQUAL])),
             CTxOut(coinvalue - 200000, P2SH_OP_TRUE),
         ]
         tx.rehash()
         genesis_alp = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=tx.hash,
                     token_type=pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
                     tx_type=pb.GENESIS,
                     actual_burn_amount="0",
                 ),
             ],
             inputs=[pb.Token()],
             outputs=[
                 pb.Token(),
                 alp_token(token_id=tx.hash, amount=10),
                 alp_token(token_id=tx.hash, amount=20),
                 alp_token(token_id=tx.hash, is_mint_baton=True),
                 pb.Token(),
             ],
             token_info=pb.TokenInfo(
                 token_id=tx.hash,
                 token_type=pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
                 genesis_info=pb.GenesisInfo(),
             ),
         )
         txs.append(genesis_alp)
-        genesis_alp.send(node)
+        genesis_alp.send(chronik)
         genesis_alp.test(chronik)
 
         script1_txs = [genesis_slp, send_slp, genesis_alp]
         script1_txs = sorted(script1_txs, key=lambda tx: tx.txid)
         history_txs = chronik.script("p2sh", "01" * 20).unconfirmed_txs().ok().txs
         for tx, proto_tx in zip_longest(script1_txs, history_txs):
             tx.test_tx(proto_tx)
 
         script2_txs = [send_slp]
         history_txs = chronik.script("p2sh", "02" * 20).unconfirmed_txs().ok().txs
         for tx, proto_tx in zip_longest(script2_txs, history_txs):
             tx.test_tx(proto_tx)
 
         script1_utxos = [
             pb.ScriptUtxo(
                 outpoint=pb.OutPoint(
                     txid=bytes.fromhex(genesis_slp.txid)[::-1], out_idx=2
                 ),
                 block_height=-1,
                 value=546,
                 token=slp_token(
                     token_id=genesis_slp.txid, is_mint_baton=True, entry_idx=-1
                 ),
             ),
             pb.ScriptUtxo(
                 outpoint=pb.OutPoint(
                     txid=bytes.fromhex(send_slp.txid)[::-1], out_idx=1
                 ),
                 block_height=-1,
                 value=546,
                 token=slp_token(token_id=genesis_slp.txid, amount=1000, entry_idx=-1),
             ),
             pb.ScriptUtxo(
                 outpoint=pb.OutPoint(
                     txid=bytes.fromhex(send_slp.txid)[::-1], out_idx=2
                 ),
                 block_height=-1,
                 value=546,
                 token=slp_token(token_id=genesis_slp.txid, amount=2000, entry_idx=-1),
             ),
             pb.ScriptUtxo(
                 outpoint=pb.OutPoint(
                     txid=bytes.fromhex(genesis_alp.txid)[::-1], out_idx=1
                 ),
                 block_height=-1,
                 value=546,
                 token=alp_token(token_id=genesis_alp.txid, amount=10, entry_idx=-1),
             ),
             pb.ScriptUtxo(
                 outpoint=pb.OutPoint(
                     txid=bytes.fromhex(genesis_alp.txid)[::-1], out_idx=2
                 ),
                 block_height=-1,
                 value=546,
                 token=alp_token(token_id=genesis_alp.txid, amount=20, entry_idx=-1),
             ),
             pb.ScriptUtxo(
                 outpoint=pb.OutPoint(
                     txid=bytes.fromhex(genesis_alp.txid)[::-1], out_idx=3
                 ),
                 block_height=-1,
                 value=546,
                 token=alp_token(
                     token_id=genesis_alp.txid, is_mint_baton=True, entry_idx=-1
                 ),
             ),
         ]
         script1_utxos = sorted(script1_utxos, key=lambda o: o.outpoint.txid[::-1])
         utxos = chronik.script("p2sh", "01" * 20).utxos().ok().utxos
         for utxo, proto_utxo in zip_longest(script1_utxos, utxos):
             assert_equal(utxo, proto_utxo)
 
         script2_utxos = [
             pb.ScriptUtxo(
                 outpoint=pb.OutPoint(
                     txid=bytes.fromhex(send_slp.txid)[::-1], out_idx=3
                 ),
                 block_height=-1,
                 value=546,
                 token=slp_token(token_id=genesis_slp.txid, amount=1500, entry_idx=-1),
             ),
             pb.ScriptUtxo(
                 outpoint=pb.OutPoint(
                     txid=bytes.fromhex(send_slp.txid)[::-1], out_idx=4
                 ),
                 block_height=-1,
                 value=546,
                 token=slp_token(token_id=genesis_slp.txid, amount=500, entry_idx=-1),
             ),
         ]
         script2_utxos = sorted(script2_utxos, key=lambda o: o.outpoint.txid[::-1])
         utxos = chronik.script("p2sh", "02" * 20).utxos().ok().utxos
         for utxo, proto_utxo in zip_longest(script2_utxos, utxos):
             assert_equal(utxo, proto_utxo)
 
         # After mining, all txs still work fine
         block_hash = self.generatetoaddress(node, 1, ADDRESS_ECREG_UNSPENDABLE)[0]
         for tx in txs:
             tx.test(chronik, block_hash)
 
         history_txs = chronik.script("p2sh", "01" * 20).confirmed_txs().ok().txs
         for tx, proto_tx in zip_longest(script1_txs, history_txs):
             tx.test_tx(proto_tx, block_hash)
         history_txs = chronik.script("p2sh", "02" * 20).confirmed_txs().ok().txs
         for tx, proto_tx in zip_longest(script2_txs, history_txs):
             tx.test_tx(proto_tx, block_hash)
 
         for utxo in script1_utxos + script2_utxos:
             utxo.block_height = 102
         utxos = chronik.script("p2sh", "01" * 20).utxos().ok().utxos
         for utxo, proto_utxo in zip_longest(script1_utxos, utxos):
             assert_equal(utxo, proto_utxo)
         utxos = chronik.script("p2sh", "02" * 20).utxos().ok().utxos
         for utxo, proto_utxo in zip_longest(script2_utxos, utxos):
             assert_equal(utxo, proto_utxo)
 
         # Undo block + test again
         node.invalidateblock(block_hash)
         for tx in txs:
             tx.test(chronik)
 
         history_txs = chronik.script("p2sh", "01" * 20).unconfirmed_txs().ok().txs
         for tx, proto_tx in zip_longest(script1_txs, history_txs):
             tx.test_tx(proto_tx)
         history_txs = chronik.script("p2sh", "02" * 20).unconfirmed_txs().ok().txs
         for tx, proto_tx in zip_longest(script2_txs, history_txs):
             tx.test_tx(proto_tx)
 
         for utxo in script1_utxos + script2_utxos:
             utxo.block_height = -1
         utxos = chronik.script("p2sh", "01" * 20).utxos().ok().utxos
         for utxo, proto_utxo in zip_longest(script1_utxos, utxos):
             assert_equal(utxo, proto_utxo)
         utxos = chronik.script("p2sh", "02" * 20).utxos().ok().utxos
         for utxo, proto_utxo in zip_longest(script2_utxos, utxos):
             assert_equal(utxo, proto_utxo)
 
 
 if __name__ == "__main__":
     ChronikTokenScriptGroup().main()
diff --git a/test/functional/chronik_token_slp_fungible.py b/test/functional/chronik_token_slp_fungible.py
index 1dd0cb800..07b153211 100644
--- a/test/functional/chronik_token_slp_fungible.py
+++ b/test/functional/chronik_token_slp_fungible.py
@@ -1,248 +1,248 @@
 #!/usr/bin/env python3
 # Copyright (c) 2024 The Bitcoin developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """
 Test Chronik indexes fungible SLP tokens.
 """
 
 from test_framework.address import (
     ADDRESS_ECREG_P2SH_OP_TRUE,
     ADDRESS_ECREG_UNSPENDABLE,
     P2SH_OP_TRUE,
     SCRIPTSIG_OP_TRUE,
 )
 from test_framework.chronik.slp import slp_genesis, slp_mint, slp_send
 from test_framework.chronik.token_tx import TokenTx
 from test_framework.messages import COutPoint, CTransaction, CTxIn, CTxOut
 from test_framework.test_framework import BitcoinTestFramework
 
 
 class ChronikTokenSlpFungible(BitcoinTestFramework):
     def set_test_params(self):
         self.setup_clean_chain = True
         self.num_nodes = 1
         self.extra_args = [["-chronik"]]
 
     def skip_test_if_missing_module(self):
         self.skip_if_no_chronik()
 
     def run_test(self):
         from test_framework.chronik.client import pb
 
         def slp_token(token_type=None, **kwargs) -> pb.Token:
             return pb.Token(
                 token_type=token_type or pb.TokenType(slp=pb.SLP_TOKEN_TYPE_FUNGIBLE),
                 **kwargs,
             )
 
         node = self.nodes[0]
         chronik = node.get_chronik_client()
 
         mocktime = 1300000000
         node.setmocktime(mocktime)
 
         coinblockhash = self.generatetoaddress(node, 1, ADDRESS_ECREG_P2SH_OP_TRUE)[0]
         coinblock = node.getblock(coinblockhash)
         cointx = coinblock["tx"][0]
 
         self.generatetoaddress(node, 100, ADDRESS_ECREG_UNSPENDABLE)
 
         coinvalue = 5000000000
 
         txs = []
 
         tx = CTransaction()
         tx.vin = [CTxIn(COutPoint(int(cointx, 16), 0), SCRIPTSIG_OP_TRUE)]
         tx.vout = [
             CTxOut(
                 0,
                 slp_genesis(
                     token_type=pb.SLP_TOKEN_TYPE_FUNGIBLE,
                     token_ticker=b"SLPTEST",
                     token_name=b"Test SLP Token 3",
                     token_document_url=b"http://example/slp",
                     token_document_hash=b"x" * 32,
                     decimals=4,
                     mint_baton_vout=2,
                     initial_mint_amount=5000,
                 ),
             ),
             CTxOut(10000, P2SH_OP_TRUE),
             CTxOut(10000, P2SH_OP_TRUE),
             CTxOut(coinvalue - 400000, P2SH_OP_TRUE),
         ]
         tx.rehash()
         genesis = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=tx.hash,
                     token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_FUNGIBLE),
                     tx_type=pb.GENESIS,
                     actual_burn_amount="0",
                 ),
             ],
             inputs=[pb.Token()],
             outputs=[
                 pb.Token(),
                 pb.Token(
                     token_id=tx.hash,
                     amount=5000,
                     token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_FUNGIBLE),
                 ),
                 pb.Token(
                     token_id=tx.hash,
                     is_mint_baton=True,
                     token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_FUNGIBLE),
                 ),
                 pb.Token(),
             ],
             token_info=pb.TokenInfo(
                 token_id=tx.hash,
                 token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_FUNGIBLE),
                 genesis_info=pb.GenesisInfo(
                     token_ticker=b"SLPTEST",
                     token_name=b"Test SLP Token 3",
                     url=b"http://example/slp",
                     hash=b"x" * 32,
                     decimals=4,
                 ),
             ),
         )
         txs.append(genesis)
-        genesis.send(node)
+        genesis.send(chronik)
         genesis.test(chronik)
 
         tx = CTransaction()
         tx.vin = [CTxIn(COutPoint(int(genesis.txid, 16), 2), SCRIPTSIG_OP_TRUE)]
         tx.vout = [
             CTxOut(
                 0,
                 slp_mint(
                     token_type=pb.SLP_TOKEN_TYPE_FUNGIBLE,
                     token_id=genesis.txid,
                     mint_baton_vout=3,
                     mint_amount=20,
                 ),
             ),
             CTxOut(2000, P2SH_OP_TRUE),
             CTxOut(2000, P2SH_OP_TRUE),
             CTxOut(2000, P2SH_OP_TRUE),
         ]
         mint = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=genesis.txid,
                     token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_FUNGIBLE),
                     tx_type=pb.MINT,
                     actual_burn_amount="0",
                 ),
             ],
             inputs=[slp_token(token_id=genesis.txid, is_mint_baton=True)],
             outputs=[
                 pb.Token(),
                 slp_token(token_id=genesis.txid, amount=20),
                 pb.Token(),
                 slp_token(token_id=genesis.txid, is_mint_baton=True),
             ],
         )
         txs.append(mint)
-        mint.send(node)
+        mint.send(chronik)
         mint.test(chronik)
 
         tx = CTransaction()
         tx.vin = [CTxIn(COutPoint(int(genesis.txid, 16), 1), SCRIPTSIG_OP_TRUE)]
         tx.vout = [
             CTxOut(
                 0,
                 slp_send(
                     token_type=pb.SLP_TOKEN_TYPE_FUNGIBLE,
                     token_id=genesis.txid,
                     amounts=[1000, 4000],
                 ),
             ),
             CTxOut(4000, P2SH_OP_TRUE),
             CTxOut(4000, P2SH_OP_TRUE),
         ]
         send = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=genesis.txid,
                     token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_FUNGIBLE),
                     tx_type=pb.SEND,
                     actual_burn_amount="0",
                 ),
             ],
             inputs=[
                 slp_token(token_id=genesis.txid, amount=5000),
             ],
             outputs=[
                 pb.Token(),
                 slp_token(token_id=genesis.txid, amount=1000),
                 slp_token(token_id=genesis.txid, amount=4000),
             ],
         )
         txs.append(send)
-        send.send(node)
+        send.send(chronik)
         send.test(chronik)
 
         # SLP GENESIS with empty GenesisInfo
         tx = CTransaction()
         tx.vin = [CTxIn(COutPoint(int(genesis.txid, 16), 3), SCRIPTSIG_OP_TRUE)]
         tx.vout = [
             CTxOut(
                 0,
                 slp_genesis(
                     token_type=pb.SLP_TOKEN_TYPE_FUNGIBLE,
                     mint_baton_vout=None,
                     initial_mint_amount=0,
                 ),
             ),
             CTxOut(coinvalue - 500000, P2SH_OP_TRUE),
         ]
         tx.rehash()
         genesis_empty = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=tx.hash,
                     token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_FUNGIBLE),
                     tx_type=pb.GENESIS,
                     actual_burn_amount="0",
                 ),
             ],
             inputs=[pb.Token()],
             outputs=[
                 pb.Token(),
                 pb.Token(),
             ],
             token_info=pb.TokenInfo(
                 token_id=tx.hash,
                 token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_FUNGIBLE),
                 genesis_info=pb.GenesisInfo(),
             ),
         )
         txs.append(genesis_empty)
-        genesis_empty.send(node)
+        genesis_empty.send(chronik)
         genesis_empty.test(chronik)
 
         # After mining, all txs still work fine
         block_hash = self.generatetoaddress(node, 1, ADDRESS_ECREG_UNSPENDABLE)[0]
         for tx in txs:
             tx.test(chronik, block_hash)
 
         # Undo block + test again
         node.invalidateblock(block_hash)
         for tx in txs:
             tx.test(chronik)
 
 
 if __name__ == "__main__":
     ChronikTokenSlpFungible().main()
diff --git a/test/functional/chronik_token_slp_mint_vault.py b/test/functional/chronik_token_slp_mint_vault.py
index 662eeba0a..2487874bc 100644
--- a/test/functional/chronik_token_slp_mint_vault.py
+++ b/test/functional/chronik_token_slp_mint_vault.py
@@ -1,344 +1,350 @@
 #!/usr/bin/env python3
 # Copyright (c) 2024 The Bitcoin developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """
 Test Chronik indexes SLP V2 MINT VAULT txs correctly.
 """
 
 from test_framework.address import (
     ADDRESS_ECREG_P2SH_OP_TRUE,
     ADDRESS_ECREG_UNSPENDABLE,
     P2SH_OP_TRUE,
     SCRIPTSIG_OP_TRUE,
 )
 from test_framework.blocktools import (
     create_block,
     create_coinbase,
     make_conform_to_ctor,
 )
 from test_framework.chronik.slp import slp_genesis, slp_mint_vault, slp_send
 from test_framework.chronik.token_tx import TokenTx
 from test_framework.messages import COutPoint, CTransaction, CTxIn, CTxOut
 from test_framework.p2p import P2PDataStore
 from test_framework.script import OP_12, OP_EQUAL, OP_HASH160, CScript, hash160
 from test_framework.test_framework import BitcoinTestFramework
 from test_framework.txtools import pad_tx
 
 
 class ChronikTokenSlpMintVault(BitcoinTestFramework):
     def set_test_params(self):
         self.setup_clean_chain = True
         self.num_nodes = 1
         self.extra_args = [["-chronik"]]
 
     def skip_test_if_missing_module(self):
         self.skip_if_no_chronik()
 
     def run_test(self):
         from test_framework.chronik.client import pb
 
         def vault_token(token_type=None, **kwargs) -> pb.Token:
             return pb.Token(
                 token_type=token_type or pb.TokenType(slp=pb.SLP_TOKEN_TYPE_MINT_VAULT),
                 **kwargs,
             )
 
         node = self.nodes[0]
         chronik = node.get_chronik_client()
 
         peer = node.add_p2p_connection(P2PDataStore())
         mocktime = 1300000000
         node.setmocktime(mocktime)
 
         coinblockhash = self.generatetoaddress(node, 1, ADDRESS_ECREG_P2SH_OP_TRUE)[0]
         coinblock = node.getblock(coinblockhash)
         cointx = coinblock["tx"][0]
 
         block_hashes = self.generatetoaddress(node, 100, ADDRESS_ECREG_UNSPENDABLE)
 
         coinvalue = 5000000000
 
         # Fan-out UTXOs so we have coins to work with
         fan_tx = CTransaction()
         fan_tx.vin = [CTxIn(COutPoint(int(cointx, 16), 0), SCRIPTSIG_OP_TRUE)]
         fan_tx.vout = [
             CTxOut(100000, P2SH_OP_TRUE),
             CTxOut(100000, P2SH_OP_TRUE),
             CTxOut(100000, P2SH_OP_TRUE),
             CTxOut(coinvalue - 500000, P2SH_OP_TRUE),
         ]
         fan_tx.rehash()
         fan_txid = node.sendrawtransaction(fan_tx.serialize().hex())
 
         # VAULT script locking MINT txs
         mint_vault_script = CScript([OP_12])
         mint_vault_scripthash = hash160(mint_vault_script)
 
         # Setup vault UTXOs
         vault_setup_tx = CTransaction()
         vault_setup_tx.vin = [CTxIn(COutPoint(int(fan_txid, 16), 1), SCRIPTSIG_OP_TRUE)]
         vault_setup_tx.vout = [
             CTxOut(10000, CScript([OP_HASH160, mint_vault_scripthash, OP_EQUAL])),
             CTxOut(10000, CScript([OP_HASH160, mint_vault_scripthash, OP_EQUAL])),
             CTxOut(79000, CScript([OP_HASH160, mint_vault_scripthash, OP_EQUAL])),
         ]
         pad_tx(vault_setup_tx)
         vault_setup_txid = node.sendrawtransaction(vault_setup_tx.serialize().hex())
         # Mine VAULT setup txs
         block_hashes += self.generatetoaddress(node, 1, ADDRESS_ECREG_UNSPENDABLE)
 
         # SLP V2 MINT VAULT GENESIS
         tx = CTransaction()
         tx.vin = [CTxIn(COutPoint(int(fan_txid, 16), 0), SCRIPTSIG_OP_TRUE)]
         tx.vout = [
             CTxOut(
                 0,
                 slp_genesis(
                     token_type=pb.SLP_TOKEN_TYPE_MINT_VAULT,
                     token_ticker=b"SLPVAULT",
                     token_name=b"0",
                     token_document_url=b"0",
                     token_document_hash=b"x" * 32,
                     mint_vault_scripthash=mint_vault_scripthash,
                     initial_mint_amount=1000,
                 ),
             ),
             CTxOut(546, P2SH_OP_TRUE),
             CTxOut(99000, P2SH_OP_TRUE),
         ]
         tx.rehash()
         genesis = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=tx.hash,
                     token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_MINT_VAULT),
                     tx_type=pb.GENESIS,
                     actual_burn_amount="0",
                 ),
             ],
             inputs=[pb.Token()],
             outputs=[
                 pb.Token(),
                 vault_token(token_id=tx.hash, amount=1000),
                 pb.Token(),
             ],
             token_info=pb.TokenInfo(
                 token_id=tx.hash,
                 token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_MINT_VAULT),
                 genesis_info=pb.GenesisInfo(
                     token_ticker=b"SLPVAULT",
                     token_name=b"0",
                     url=b"0",
                     hash=b"x" * 32,
                     mint_vault_scripthash=mint_vault_scripthash,
                 ),
             ),
         )
-        genesis.send(node)
+        genesis.send(chronik)
         genesis.test(chronik)
 
         # SLP V2 MINT VAULT MINT
         tx = CTransaction()
         tx.vin = [
             CTxIn(
                 COutPoint(int(vault_setup_txid, 16), 0),
                 CScript([bytes(CScript([OP_12]))]),
             )
         ]
         tx.vout = [
             CTxOut(
                 0,
                 slp_mint_vault(
                     token_id=genesis.txid,
                     mint_amounts=[4000],
                 ),
             ),
             CTxOut(9000, P2SH_OP_TRUE),
         ]
         # MINT tx, but invalid because the GENESIS tx isn't mined yet
         mint = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NOT_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=genesis.txid,
                     token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_MINT_VAULT),
                     tx_type=pb.MINT,
                     is_invalid=True,
                     burn_summary="Validation error: Missing MINT vault",
                     actual_burn_amount="0",
                 ),
             ],
             inputs=[pb.Token()],
             outputs=[
                 pb.Token(),
                 pb.Token(),
             ],
         )
-        mint.send(node)
+        mint.send(
+            chronik,
+            error=f"400: Tx {mint.txid} failed token checks: Validation error: Missing MINT vault.",
+        )
         mint.test(chronik)
 
         # Mine only the GENESIS tx
         block_height = 103
         block = create_block(
             int(block_hashes[-1], 16),
             create_coinbase(block_height, b"\x03" * 33),
             1300000500,
         )
         block.vtx += [genesis.tx]
         block.hashMerkleRoot = block.calc_merkle_root()
         block.solve()
         peer.send_blocks_and_test([block], node)
         genesis.test(chronik, block.hash)
 
         # MINT is still invalid, despite GENESIS being mined.
         # This inconsistency is intended behavior, see chronik/chronik-db/src/mem/tokens.rs for details.
         mint.test(chronik)
 
         # Another SLP V2 MINT VAULT MINT, this time valid because the GENESIS is mined.
         tx = CTransaction()
         tx.vin = [
             CTxIn(
                 COutPoint(int(vault_setup_txid, 16), 1),
                 CScript([bytes(CScript([OP_12]))]),
             )
         ]
         tx.vout = [
             CTxOut(
                 0,
                 slp_mint_vault(
                     token_id=genesis.txid,
                     mint_amounts=[5000],
                 ),
             ),
             CTxOut(546, P2SH_OP_TRUE),
         ]
         mint2 = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=genesis.txid,
                     token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_MINT_VAULT),
                     tx_type=pb.MINT,
                     actual_burn_amount="0",
                 ),
             ],
             inputs=[pb.Token()],
             outputs=[
                 pb.Token(),
                 vault_token(token_id=genesis.txid, amount=5000),
             ],
         )
-        mint2.send(node)
+        mint2.send(chronik)
         mint2.test(chronik)
 
         # Reorg block with the GENESIS tx
         node.invalidateblock(block.hash)
 
         # GENESIS still valid
         genesis.test(chronik)
         # MINT still invalid (correctly, now)
         mint.test(chronik)
 
         # mint2 now invalid (disconnect removes and re-adds all mempool txs)
         mint2.status = pb.TOKEN_STATUS_NOT_NORMAL
         mint2.entries[0].is_invalid = True
         mint2.entries[0].burn_summary = "Validation error: Missing MINT vault"
         mint2.outputs = [pb.Token(), pb.Token()]
         mint2.test(chronik)
 
         # Mine GENESIS and mint2
         block_height = 103
         block = create_block(
             int(block_hashes[-1], 16),
             create_coinbase(block_height, b"\x03" * 33),
             1300000500,
         )
         block.vtx += [genesis.tx, mint2.tx]
         make_conform_to_ctor(block)
         block.hashMerkleRoot = block.calc_merkle_root()
         block.solve()
         peer.send_blocks_and_test([block], node)
         block_hashes.append(block.hash)
 
         # GENESIS still valid
         genesis.test(chronik, block.hash)
         # MINTs still invalid
         mint.test(chronik)
         mint2.test(chronik, block.hash)
 
         # Add SEND to mempool from `mint`
         tx = CTransaction()
         tx.vin = [
             CTxIn(
                 COutPoint(int(mint.txid, 16), 1),
                 SCRIPTSIG_OP_TRUE,
             )
         ]
         tx.vout = [
             CTxOut(
                 0,
                 slp_send(
                     token_type=pb.SLP_TOKEN_TYPE_MINT_VAULT,
                     token_id=genesis.txid,
                     amounts=[3000, 1000],
                 ),
             ),
             CTxOut(546, P2SH_OP_TRUE),
             CTxOut(546, P2SH_OP_TRUE),
         ]
         send = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NOT_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=genesis.txid,
                     token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_MINT_VAULT),
                     tx_type=pb.SEND,
                     is_invalid=True,
                     actual_burn_amount="0",
                     burn_summary="Validation error: Insufficient token input output sum: 0 < 4000",
                 ),
             ],
             inputs=[pb.Token()],
             outputs=[
                 pb.Token(),
                 pb.Token(),
                 pb.Token(),
             ],
         )
-        send.send(node)
+        send.send(
+            chronik,
+            error=f"400: Tx {send.txid} failed token checks: Validation error: Insufficient token input output sum: 0 < 4000.",
+        )
         send.test(chronik)
 
         # Mine mint
         block_hashes += self.generatetoaddress(node, 1, ADDRESS_ECREG_UNSPENDABLE)
         # Now it becomes valid
         mint.status = pb.TOKEN_STATUS_NORMAL
         mint.entries[0].is_invalid = False
         mint.entries[0].burn_summary = ""
         mint.outputs = [pb.Token(), vault_token(token_id=genesis.txid, amount=4000)]
         mint.test(chronik, block_hashes[-1])
         # The SEND also transitively becomes valid
         send.status = pb.TOKEN_STATUS_NORMAL
         send.entries[0].is_invalid = False
         send.entries[0].burn_summary = ""
         send.inputs = [vault_token(token_id=genesis.txid, amount=4000)]
         send.outputs = [
             pb.Token(),
             vault_token(token_id=genesis.txid, amount=3000),
             vault_token(token_id=genesis.txid, amount=1000),
         ]
         send.test(chronik, block_hashes[-1])
 
         # After invalidating the last block, the txs are still valid
         node.invalidateblock(block_hashes[-1])
         mint.test(chronik)
         send.test(chronik)
 
 
 if __name__ == "__main__":
     ChronikTokenSlpMintVault().main()
diff --git a/test/functional/chronik_token_slp_nft1.py b/test/functional/chronik_token_slp_nft1.py
index 5bd66a4d1..abbc14b51 100644
--- a/test/functional/chronik_token_slp_nft1.py
+++ b/test/functional/chronik_token_slp_nft1.py
@@ -1,261 +1,261 @@
 #!/usr/bin/env python3
 # Copyright (c) 2024 The Bitcoin developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 """
 Test Chronik indexes SLP NFT1 tokens correctly.
 """
 
 from test_framework.address import (
     ADDRESS_ECREG_P2SH_OP_TRUE,
     ADDRESS_ECREG_UNSPENDABLE,
     P2SH_OP_TRUE,
     SCRIPTSIG_OP_TRUE,
 )
 from test_framework.chronik.slp import slp_genesis, slp_mint, slp_send
 from test_framework.chronik.token_tx import TokenTx
 from test_framework.messages import COutPoint, CTransaction, CTxIn, CTxOut
 from test_framework.test_framework import BitcoinTestFramework
 
 
 class ChronikTokenSlpNft1(BitcoinTestFramework):
     def set_test_params(self):
         self.setup_clean_chain = True
         self.num_nodes = 1
         self.extra_args = [["-chronik"]]
 
     def skip_test_if_missing_module(self):
         self.skip_if_no_chronik()
 
     def run_test(self):
         from test_framework.chronik.client import pb
 
         def group_token(token_type=None, **kwargs) -> pb.Token:
             return pb.Token(
                 token_type=token_type or pb.TokenType(slp=pb.SLP_TOKEN_TYPE_NFT1_GROUP),
                 **kwargs,
             )
 
         def child_token(token_type=None, **kwargs) -> pb.Token:
             return pb.Token(
                 token_type=token_type or pb.TokenType(slp=pb.SLP_TOKEN_TYPE_NFT1_CHILD),
                 **kwargs,
             )
 
         node = self.nodes[0]
         chronik = node.get_chronik_client()
 
         mocktime = 1300000000
         node.setmocktime(mocktime)
 
         coinblockhash = self.generatetoaddress(node, 1, ADDRESS_ECREG_P2SH_OP_TRUE)[0]
         coinblock = node.getblock(coinblockhash)
         cointx = coinblock["tx"][0]
 
         self.generatetoaddress(node, 100, ADDRESS_ECREG_UNSPENDABLE)
 
         coinvalue = 5000000000
 
         txs = []
 
         tx = CTransaction()
         tx.vin = [CTxIn(COutPoint(int(cointx, 16), 0), SCRIPTSIG_OP_TRUE)]
         tx.vout = [
             CTxOut(
                 0,
                 slp_genesis(
                     token_type=pb.SLP_TOKEN_TYPE_NFT1_GROUP,
                     token_ticker=b"SLP NFT GROUP",
                     token_name=b"Slp NFT GROUP token",
                     token_document_url=b"http://slp.nft",
                     token_document_hash=b"x" * 32,
                     decimals=4,
                     mint_baton_vout=2,
                     initial_mint_amount=5000,
                 ),
             ),
             CTxOut(10000, P2SH_OP_TRUE),
             CTxOut(10000, P2SH_OP_TRUE),
             CTxOut(coinvalue - 400000, P2SH_OP_TRUE),
         ]
         tx.rehash()
         genesis = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=tx.hash,
                     token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_NFT1_GROUP),
                     tx_type=pb.GENESIS,
                     actual_burn_amount="0",
                 ),
             ],
             inputs=[pb.Token()],
             outputs=[
                 pb.Token(),
                 group_token(token_id=tx.hash, amount=5000),
                 group_token(token_id=tx.hash, is_mint_baton=True),
                 pb.Token(),
             ],
             token_info=pb.TokenInfo(
                 token_id=tx.hash,
                 token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_NFT1_GROUP),
                 genesis_info=pb.GenesisInfo(
                     token_ticker=b"SLP NFT GROUP",
                     token_name=b"Slp NFT GROUP token",
                     url=b"http://slp.nft",
                     hash=b"x" * 32,
                     decimals=4,
                 ),
             ),
         )
         txs.append(genesis)
-        genesis.send(node)
+        genesis.send(chronik)
         genesis.test(chronik)
 
         tx = CTransaction()
         tx.vin = [CTxIn(COutPoint(int(genesis.txid, 16), 2), SCRIPTSIG_OP_TRUE)]
         tx.vout = [
             CTxOut(
                 0,
                 slp_mint(
                     token_type=pb.SLP_TOKEN_TYPE_NFT1_GROUP,
                     token_id=genesis.txid,
                     mint_baton_vout=3,
                     mint_amount=20,
                 ),
             ),
             CTxOut(2000, P2SH_OP_TRUE),
             CTxOut(2000, P2SH_OP_TRUE),
             CTxOut(2000, P2SH_OP_TRUE),
         ]
         mint = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=genesis.txid,
                     token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_NFT1_GROUP),
                     tx_type=pb.MINT,
                     actual_burn_amount="0",
                 ),
             ],
             inputs=[group_token(token_id=genesis.txid, is_mint_baton=True)],
             outputs=[
                 pb.Token(),
                 group_token(token_id=genesis.txid, amount=20),
                 pb.Token(),
                 group_token(token_id=genesis.txid, is_mint_baton=True),
             ],
         )
         txs.append(mint)
-        mint.send(node)
+        mint.send(chronik)
         mint.test(chronik)
 
         tx = CTransaction()
         tx.vin = [CTxIn(COutPoint(int(genesis.txid, 16), 1), SCRIPTSIG_OP_TRUE)]
         tx.vout = [
             CTxOut(
                 0,
                 slp_send(
                     token_type=pb.SLP_TOKEN_TYPE_NFT1_GROUP,
                     token_id=genesis.txid,
                     amounts=[1, 99, 900, 4000],
                 ),
             ),
             CTxOut(2000, P2SH_OP_TRUE),
             CTxOut(2000, P2SH_OP_TRUE),
             CTxOut(2000, P2SH_OP_TRUE),
             CTxOut(2000, P2SH_OP_TRUE),
         ]
         send = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=genesis.txid,
                     token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_NFT1_GROUP),
                     tx_type=pb.SEND,
                     actual_burn_amount="0",
                 ),
             ],
             inputs=[
                 group_token(token_id=genesis.txid, amount=5000),
             ],
             outputs=[
                 pb.Token(),
                 group_token(token_id=genesis.txid, amount=1),
                 group_token(token_id=genesis.txid, amount=99),
                 group_token(token_id=genesis.txid, amount=900),
                 group_token(token_id=genesis.txid, amount=4000),
             ],
         )
         txs.append(send)
-        send.send(node)
+        send.send(chronik)
         send.test(chronik)
 
         # NFT1 CHILD GENESIS
         tx = CTransaction()
         tx.vin = [CTxIn(COutPoint(int(send.txid, 16), 1), SCRIPTSIG_OP_TRUE)]
         tx.vout = [
             CTxOut(
                 0,
                 slp_genesis(
                     token_type=pb.SLP_TOKEN_TYPE_NFT1_CHILD,
                     token_ticker=b"SLP NFT CHILD",
                     token_name=b"Slp NFT CHILD token",
                     decimals=0,
                     initial_mint_amount=1,
                 ),
             ),
             CTxOut(1400, P2SH_OP_TRUE),
         ]
         tx.rehash()
         child_genesis1 = TokenTx(
             tx=tx,
             status=pb.TOKEN_STATUS_NORMAL,
             entries=[
                 pb.TokenEntry(
                     token_id=tx.hash,
                     token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_NFT1_CHILD),
                     tx_type=pb.GENESIS,
                     group_token_id=genesis.txid,
                     actual_burn_amount="0",
                 ),
                 pb.TokenEntry(
                     token_id=genesis.txid,
                     token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_NFT1_GROUP),
                     actual_burn_amount="0",
                 ),
             ],
             inputs=[group_token(token_id=genesis.txid, entry_idx=1, amount=1)],
             outputs=[
                 pb.Token(),
                 child_token(token_id=tx.hash, amount=1),
             ],
             token_info=pb.TokenInfo(
                 token_id=tx.hash,
                 token_type=pb.TokenType(slp=pb.SLP_TOKEN_TYPE_NFT1_CHILD),
                 genesis_info=pb.GenesisInfo(
                     token_ticker=b"SLP NFT CHILD",
                     token_name=b"Slp NFT CHILD token",
                 ),
             ),
         )
         txs.append(child_genesis1)
-        child_genesis1.send(node)
+        child_genesis1.send(chronik)
         child_genesis1.test(chronik)
 
         # After mining, all txs still work fine
         block_hash = self.generatetoaddress(node, 1, ADDRESS_ECREG_UNSPENDABLE)[0]
         for tx in txs:
             tx.test(chronik, block_hash)
 
         # Undo block + test again
         node.invalidateblock(block_hash)
         for tx in txs:
             tx.test(chronik)
 
 
 if __name__ == "__main__":
     ChronikTokenSlpNft1().main()
diff --git a/test/functional/test_framework/chronik/client.py b/test/functional/test_framework/chronik/client.py
index 224a4066e..bf896dbec 100644
--- a/test/functional/test_framework/chronik/client.py
+++ b/test/functional/test_framework/chronik/client.py
@@ -1,262 +1,292 @@
 #!/usr/bin/env python3
 # Copyright (c) 2023 The Bitcoin developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 import http.client
 import threading
 import time
-from typing import List, Union
+from typing import List, Optional, Union
 
 import chronik_pb2 as pb
 import websocket
 
 # Timespan when HTTP requests to Chronik time out
 DEFAULT_TIMEOUT = 30
 
 
 class UnexpectedContentType(Exception):
     pass
 
 
 class ChronikResponse:
     def __init__(self, status: int, *, ok_proto=None, error_proto=None) -> None:
         self.status = status
         self.ok_proto = ok_proto
         self.error_proto = error_proto
 
     def ok(self):
         if self.status != 200:
             raise AssertionError(
                 f"Expected OK response, but got status {self.status}, error: "
                 f"{self.error_proto}"
             )
         return self.ok_proto
 
     def err(self, status: int):
         if self.status == 200:
             raise AssertionError(
                 f"Expected error response status {status}, but got OK: {self.ok_proto}"
             )
         if self.status != status:
             raise AssertionError(
                 f"Expected error response status {status}, but got different error "
                 f"status {self.status}, error: {self.error_proto}"
             )
         return self.error_proto
 
 
 class ChronikScriptClient:
     def __init__(
         self, client: "ChronikClient", script_type: str, script_payload: str
     ) -> None:
         self.client = client
         self.script_type = script_type
         self.script_payload = script_payload
 
     def confirmed_txs(self, page=None, page_size=None):
         query = _page_query_params(page, page_size)
         return self.client._request_get(
             f"/script/{self.script_type}/{self.script_payload}/confirmed-txs{query}",
             pb.TxHistoryPage,
         )
 
     def history(self, page=None, page_size=None):
         query = _page_query_params(page, page_size)
         return self.client._request_get(
             f"/script/{self.script_type}/{self.script_payload}/history{query}",
             pb.TxHistoryPage,
         )
 
     def unconfirmed_txs(self):
         return self.client._request_get(
             f"/script/{self.script_type}/{self.script_payload}/unconfirmed-txs",
             pb.TxHistoryPage,
         )
 
     def utxos(self):
         return self.client._request_get(
             f"/script/{self.script_type}/{self.script_payload}/utxos", pb.ScriptUtxos
         )
 
 
 class ChronikWs:
     def __init__(self, client: "ChronikClient", **kwargs) -> None:
         self.messages: List[pb.WsMsg] = []
         self.errors: List[str] = []
         self.timeout = kwargs.get("timeout", client.timeout)
         self.ping_interval = kwargs.get("ping_interval", 10)
         self.ping_timeout = kwargs.get("ping_timeout", 5)
         self.is_open = False
         self.ws_url = (
             f"{'wss' if client.https else 'ws'}://{client.host}:{client.port}/ws"
         )
 
         self.ws = websocket.WebSocketApp(
             self.ws_url,
             on_message=self.on_message,
             on_error=self.on_error,
             on_open=self.on_open,
             on_close=self.on_close,
             on_ping=self.on_ping,
             on_pong=self.on_pong,
         )
 
         self.ws_thread = threading.Thread(
             target=self.ws.run_forever,
             kwargs={
                 "ping_interval": self.ping_interval,
                 "ping_timeout": self.ping_timeout,
                 "ping_payload": "Bitcoin ABC functional test framework",
             },
         )
         self.ws_thread.start()
 
         connect_timeout = time.time() + self.timeout
         while not self.is_open:
             if time.time() > connect_timeout:
                 self.close()
                 raise TimeoutError(
                     f"Connection to chronik websocket {self.ws_url} timed out after {self.timeout}s"
                 )
             time.sleep(0.05)
 
     def on_message(self, ws, message):
         ws_msg = pb.WsMsg()
         ws_msg.ParseFromString(message)
         self.messages.append(ws_msg)
 
     def on_error(self, ws, error):
         self.errors.append(error)
 
     def on_open(self, ws):
         self.is_open = True
 
     def on_close(self, ws, close_status_code, close_message):
         pass
 
     def on_ping(self, ws, message):
         pass
 
     def on_pong(self, ws, message):
         pass
 
     def recv(self):
         recv_timeout = time.time() + self.timeout
         while len(self.messages) == 0:
             if time.time() > recv_timeout:
                 raise TimeoutError(
                     f"No message received from {self.ws_url} after {self.timeout}s"
                 )
         return self.messages.pop(0)
 
     def send_bytes(self, data: bytes) -> None:
         self.ws.send(data, websocket.ABNF.OPCODE_BINARY)
 
     def sub_to_blocks(self, *, is_unsub=False) -> None:
         sub = pb.WsSub(is_unsub=is_unsub, blocks=pb.WsSubBlocks())
         self.send_bytes(sub.SerializeToString())
 
     def sub_script(self, script_type: str, payload: bytes, *, is_unsub=False) -> None:
         sub = pb.WsSub(
             is_unsub=is_unsub,
             script=pb.WsSubScript(script_type=script_type, payload=payload),
         )
         self.send_bytes(sub.SerializeToString())
 
     def close(self):
         self.ws.close()
         self.ws_thread.join(self.timeout)
 
 
 class ChronikClient:
     CONTENT_TYPE = "application/x-protobuf"
 
     def __init__(
         self, host: str, port: int, https=False, timeout=DEFAULT_TIMEOUT
     ) -> None:
         self.host = host
         self.port = port
         self.timeout = timeout
         self.https = https
 
     def _request_get(self, path: str, pb_type):
+        return self._request("GET", path, None, pb_type)
+
+    def _request(self, method: str, path: str, body: Optional[bytes], pb_type):
         kwargs = {}
         if self.timeout is not None:
             kwargs["timeout"] = self.timeout
         client = (
             http.client.HTTPSConnection(self.host, self.port, **kwargs)
             if self.https
             else http.client.HTTPConnection(self.host, self.port, **kwargs)
         )
-        client.request("GET", path)
+        headers = {}
+        if body is not None:
+            headers["Content-Type"] = self.CONTENT_TYPE
+        client.request(method, path, body, headers)
         response = client.getresponse()
         content_type = response.getheader("Content-Type")
         body = response.read()
 
         if content_type != self.CONTENT_TYPE:
             raise UnexpectedContentType(
                 f'Unexpected Content-Type "{content_type}" (expected '
                 f'"{self.CONTENT_TYPE}"), body: {repr(body)}'
             )
 
         if response.status != 200:
             proto_error = pb.Error()
             proto_error.ParseFromString(body)
             return ChronikResponse(response.status, error_proto=proto_error)
 
         ok_proto = pb_type()
         ok_proto.ParseFromString(body)
         return ChronikResponse(response.status, ok_proto=ok_proto)
 
     def blockchain_info(self) -> ChronikResponse:
         return self._request_get("/blockchain-info", pb.BlockchainInfo)
 
     def block(self, hash_or_height: Union[str, int]) -> ChronikResponse:
         return self._request_get(f"/block/{hash_or_height}", pb.Block)
 
     def block_txs(
         self, hash_or_height: Union[str, int], page=None, page_size=None
     ) -> ChronikResponse:
         query = _page_query_params(page, page_size)
         return self._request_get(
             f"/block-txs/{hash_or_height}{query}", pb.TxHistoryPage
         )
 
     def blocks(self, start_height: int, end_height: int) -> ChronikResponse:
         return self._request_get(f"/blocks/{start_height}/{end_height}", pb.Blocks)
 
     def chronik_info(self) -> ChronikResponse:
         return self._request_get("/chronik-info", pb.ChronikInfo)
 
     def tx(self, txid: str) -> ChronikResponse:
         return self._request_get(f"/tx/{txid}", pb.Tx)
 
     def raw_tx(self, txid: str) -> bytes:
         return self._request_get(f"/raw-tx/{txid}", pb.RawTx)
 
     def token_info(self, txid: str) -> bytes:
         return self._request_get(f"/token/{txid}", pb.TokenInfo)
 
+    def broadcast_tx(
+        self, raw_tx: bytes, skip_token_checks: bool = False
+    ) -> ChronikResponse:
+        return self._request(
+            "POST",
+            "/broadcast-tx",
+            pb.BroadcastTxRequest(
+                raw_tx=raw_tx, skip_token_checks=skip_token_checks
+            ).SerializeToString(),
+            pb.BroadcastTxResponse,
+        )
+
+    def broadcast_txs(
+        self, raw_txs: List[bytes], skip_token_checks: bool = False
+    ) -> ChronikResponse:
+        return self._request(
+            "POST",
+            "/broadcast-txs",
+            pb.BroadcastTxsRequest(
+                raw_txs=raw_txs, skip_token_checks=skip_token_checks
+            ).SerializeToString(),
+            pb.BroadcastTxsResponse,
+        )
+
     def script(self, script_type: str, script_payload: str) -> ChronikScriptClient:
         return ChronikScriptClient(self, script_type, script_payload)
 
     def pause(self) -> ChronikResponse:
         return self._request_get("/pause", pb.Empty)
 
     def resume(self) -> ChronikResponse:
         return self._request_get("/resume", pb.Empty)
 
     def ws(self, **kwargs) -> ChronikWs:
         return ChronikWs(self, **kwargs)
 
 
 def _page_query_params(page=None, page_size=None) -> str:
     if page is not None and page_size is not None:
         return f"?page={page}&page_size={page_size}"
     elif page is not None:
         return f"?page={page}"
     elif page_size is not None:
         return f"?page_size={page_size}"
     else:
         return ""
diff --git a/test/functional/test_framework/chronik/token_tx.py b/test/functional/test_framework/chronik/token_tx.py
index 8f4a73313..851cdb83e 100644
--- a/test/functional/test_framework/chronik/token_tx.py
+++ b/test/functional/test_framework/chronik/token_tx.py
@@ -1,66 +1,73 @@
 #!/usr/bin/env python3
 # Copyright (c) 2024 The Bitcoin developers
 # Distributed under the MIT software license, see the accompanying
 # file COPYING or http://www.opensource.org/licenses/mit-license.php.
 
 from test_framework.messages import CTransaction
 from test_framework.util import assert_equal
 
 
 class TokenTx:
     def __init__(
         self,
         *,
         tx: CTransaction,
         status=0,
         entries=[],
         inputs=[],
         outputs=[],
         failed_parsings=[],
         token_info=None,
     ):
         tx.rehash()
         self.tx = tx
         self.txid = tx.hash
         self.status = status
         self.entries = entries
         self.inputs = inputs
         self.outputs = outputs
         self.failed_parsings = failed_parsings
         self.token_info = token_info
 
-    def send(self, node):
-        node.sendrawtransaction(self.tx.serialize().hex())
+    def send(self, chronik, error=None):
+        raw_tx = self.tx.serialize()
+        request = chronik.broadcast_tx(raw_tx)
+        if error is None:
+            request.ok()
+        else:
+            actual_error = request.err(400)
+            assert_equal(actual_error.msg, error)
+            chronik.broadcast_tx(raw_tx, skip_token_checks=True).ok()
 
     def test(self, chronik, block_hash=None):
         proto_tx = chronik.tx(self.txid).ok()
         self.test_tx(proto_tx, block_hash)
         if self.token_info is not None:
             proto_token = chronik.token_info(self.txid).ok()
             self.test_token_info(proto_token, block_hash)
         else:
             chronik.token_info(self.txid).err(404)
 
     def test_tx(self, proto_tx, block_hash=None):
         import chronik_pb2 as pb
 
         assert_equal(proto_tx.token_status, self.status)
         assert_equal(list(proto_tx.token_entries), self.entries)
         assert_equal([tx_input.token for tx_input in proto_tx.inputs], self.inputs)
         assert_equal([tx_output.token for tx_output in proto_tx.outputs], self.outputs)
         if block_hash is None:
             assert_equal(proto_tx.block, pb.BlockMetadata())
         else:
             assert_equal(proto_tx.block.hash, bytes.fromhex(block_hash)[::-1])
         assert_equal(list(proto_tx.token_failed_parsings), self.failed_parsings)
 
     def test_token_info(self, proto_token, block_hash=None):
         import chronik_pb2 as pb
 
         assert_equal(proto_token.token_id, self.token_info.token_id)
         assert_equal(proto_token.token_type, self.token_info.token_type)
         assert_equal(proto_token.genesis_info, self.token_info.genesis_info)
         if block_hash is not None:
             assert_equal(proto_token.block.hash, bytes.fromhex(block_hash)[::-1])
         else:
             assert_equal(proto_token.block, pb.BlockMetadata())