diff --git a/chronik/chronik-db/src/plugins/io.rs b/chronik/chronik-db/src/plugins/io.rs
--- a/chronik/chronik-db/src/plugins/io.rs
+++ b/chronik/chronik-db/src/plugins/io.rs
@@ -252,6 +252,10 @@
         batch: &mut WriteBatch,
         txs: &[IndexTx<'_>],
     ) -> Result<()> {
+        if self.ctx.plugins().is_empty() {
+            return Ok(());
+        }
+
         let plugin_outputs =
             self.col.fetch_plugin_outputs(txs.iter().flat_map(|tx| {
                 (0..tx.tx.outputs.len()).map(|out_idx| {
@@ -340,6 +344,15 @@
         };
         Ok(Some(db_deserialize::<PluginOutput>(&output)?))
     }
+
+    /// Read the plugin data of all the given outpoints (each paired with its
+    /// [`TxNum`]) and return them as a map keyed by [`OutPoint`].
+    pub fn plugin_outputs(
+        &self,
+        outpoints: impl IntoIterator<Item = (OutPoint, TxNum)> + Clone,
+    ) -> Result<BTreeMap<OutPoint, PluginOutput>> {
+        self.col.fetch_plugin_outputs(outpoints)
+    }
 }
 
 impl std::fmt::Debug for PluginsCol<'_> {
diff --git a/chronik/chronik-indexer/src/indexer.rs b/chronik/chronik-indexer/src/indexer.rs
--- a/chronik/chronik-indexer/src/indexer.rs
+++ b/chronik/chronik-indexer/src/indexer.rs
@@ -23,10 +23,12 @@
         prepare_indexed_txs_cached, PrepareUpdateMode, TxNumCacheSettings,
     },
     io::{
-        merge, token::TokenWriter, BlockHeight, BlockReader, BlockStatsWriter,
-        BlockTxs, BlockWriter, DbBlock, GroupHistoryMemData, GroupUtxoMemData,
-        MetadataReader, MetadataWriter, SchemaVersion, SpentByWriter, TxEntry,
-        TxReader, TxWriter,
+        merge,
+        token::{ProcessedTokenTxBatch, TokenWriter},
+        BlockHeight, BlockReader, BlockStatsWriter, BlockTxs, BlockWriter,
+        DbBlock, GroupHistoryMemData, GroupUtxoMemData, MetadataReader,
+        MetadataWriter, SchemaVersion, SpentByWriter, TxEntry, TxReader,
+        TxWriter,
     },
     mem::{MemData, MemDataConf, Mempool, MempoolTx},
     plugins::{PluginMeta, PluginsReader, PluginsWriter},
@@ -548,8 +550,9 @@
             )?;
         }
         let token_id_aux;
+        let processed_token_batch;
         if self.is_token_index_enabled {
-            let processed_token_batch =
+            processed_token_batch =
                 token_writer.insert(&mut batch, &index_txs)?;
             token_id_aux =
                 TokenIdGroupAux::from_batch(&index_txs, &processed_token_batch);
@@ -566,8 +569,15 @@
                 &mut GroupUtxoMemData::default(),
             )?;
         } else {
+            processed_token_batch = ProcessedTokenTxBatch::default();
             token_id_aux = TokenIdGroupAux::default();
         }
+        plugins_writer.insert(
+            &mut batch,
+            &index_txs,
+            &processed_token_batch,
+            &self.plugin_name_map,
+        )?;
         plugins_writer.update_sync_height(
             &mut batch,
             block.db_block.height,
@@ -672,6 +682,7 @@
             )?;
             token_writer.delete(&mut batch, &index_txs)?;
         }
+        plugins_writer.delete(&mut batch, &index_txs)?;
         plugins_writer.update_sync_height(
             &mut batch,
             block.db_block.height - 1,
@@ -732,6 +743,7 @@
             mempool: &self.mempool,
             node,
             is_token_index_enabled: self.is_token_index_enabled,
+            plugin_name_map: &self.plugin_name_map,
         }
     }
 
@@ -743,6 +755,7 @@
             mempool: &self.mempool,
             node,
             is_token_index_enabled: self.is_token_index_enabled,
+            plugin_name_map: &self.plugin_name_map,
         }
     }
 
@@ -754,6 +767,7 @@
             mempool: &self.mempool,
             node,
             is_token_index_enabled: self.is_token_index_enabled,
+            plugin_name_map: &self.plugin_name_map,
         }
     }
 
@@ -771,6 +785,7 @@
             group: self.script_group.clone(),
             node,
             is_token_index_enabled: self.is_token_index_enabled,
+            plugin_name_map: &self.plugin_name_map,
         })
     }
 
@@ -803,6 +818,7 @@
             group: TokenIdGroup,
             node,
             is_token_index_enabled: self.is_token_index_enabled,
+            plugin_name_map: &self.plugin_name_map,
         }
     }
 
@@ -835,6 +851,7 @@
             group: LokadIdGroup,
             node,
             is_token_index_enabled: self.is_token_index_enabled,
+            plugin_name_map: &self.plugin_name_map,
         }
     }
 
diff --git a/chronik/chronik-indexer/src/query/blocks.rs b/chronik/chronik-indexer/src/query/blocks.rs
--- a/chronik/chronik-indexer/src/query/blocks.rs
+++ b/chronik/chronik-indexer/src/query/blocks.rs
@@ -17,13 +17,17 @@
     },
     mem::Mempool,
 };
+use chronik_plugin::data::PluginNameMap;
 use chronik_proto::proto;
 use thiserror::Error;
 
 use crate::{
     avalanche::Avalanche,
     indexer::Node,
-    query::{make_tx_proto, HashOrHeight, OutputsSpent, TxTokenData},
+    query::{
+        make_tx_proto, read_plugin_outputs, HashOrHeight, MakeTxProtoParams,
+        OutputsSpent, TxTokenData,
+    },
 };
 
 const MAX_BLOCKS_PAGE_SIZE: usize = 500;
@@ -46,6 +50,8 @@
     pub node: &'a Node,
     /// Whether the SLP/ALP token index is enabled
     pub is_token_index_enabled: bool,
+    /// Map plugin name <-> plugin idx of all loaded plugins
+    pub plugin_name_map: &'a PluginNameMap,
 }
 
 /// Errors indicating something went wrong with querying blocks.
@@ -221,15 +227,23 @@
                 &tx,
                 self.is_token_index_enabled,
             )?;
-            txs.push(make_tx_proto(
+            let plugin_outputs = read_plugin_outputs(
+                self.db,
                 &tx,
-                &outputs_spent,
-                db_tx.entry.time_first_seen,
-                db_tx.entry.is_coinbase,
-                Some(&db_block),
-                self.avalanche,
-                token.as_ref(),
-            ));
+                Some(tx_num),
+                !self.plugin_name_map.is_empty(),
+            )?;
+            txs.push(make_tx_proto(MakeTxProtoParams {
+                tx: &tx,
+                outputs_spent: &outputs_spent,
+                time_first_seen: db_tx.entry.time_first_seen,
+                is_coinbase: db_tx.entry.is_coinbase,
+                block: Some(&db_block),
+                avalanche: self.avalanche,
+                token: token.as_ref(),
+                plugin_outputs: &plugin_outputs,
+                plugin_name_map: self.plugin_name_map,
+            }));
         }
         let total_num_txs = (tx_range.end - tx_range.start) as usize;
         let total_num_pages =
diff --git a/chronik/chronik-indexer/src/query/broadcast.rs b/chronik/chronik-indexer/src/query/broadcast.rs
--- a/chronik/chronik-indexer/src/query/broadcast.rs
+++ b/chronik/chronik-indexer/src/query/broadcast.rs
@@ -11,13 +11,17 @@
 use bytes::Bytes;
 use chronik_bridge::ffi;
 use chronik_db::{db::Db, mem::Mempool};
+use chronik_plugin::data::PluginNameMap;
 use chronik_proto::proto;
 use thiserror::Error;
 
 use crate::{
     avalanche::Avalanche,
     indexer::Node,
-    query::{make_tx_proto, OutputsSpent, QueryBroadcastError::*, TxTokenData},
+    query::{
+        make_tx_proto, read_plugin_outputs, MakeTxProtoParams, OutputsSpent,
+        QueryBroadcastError::*, TxTokenData,
+    },
 };
 
 /// Struct for broadcasting txs on the network
@@ -33,6 +37,8 @@
     pub node: &'a Node,
     /// Whether the SLP/ALP token index is enabled
     pub is_token_index_enabled: bool,
+    /// Map plugin name <-> plugin idx of all loaded plugins
+    pub plugin_name_map: &'a PluginNameMap,
 }
 
 /// Errors indicating something went wrong with reading txs.
@@ -151,14 +157,22 @@
         } else {
             None
         };
-        Ok(make_tx_proto(
+        let plugin_outputs = read_plugin_outputs(
+            self.db,
             &tx,
-            &OutputsSpent::default(),
-            0,
-            false,
             None,
-            self.avalanche,
-            token.as_ref(),
-        ))
+            !self.plugin_name_map.is_empty(),
+        )?;
+        Ok(make_tx_proto(MakeTxProtoParams {
+            tx: &tx,
+            outputs_spent: &OutputsSpent::default(),
+            time_first_seen: 0,
+            is_coinbase: false,
+            block: None,
+            avalanche: self.avalanche,
+            token: token.as_ref(),
+            plugin_outputs: &plugin_outputs,
+            plugin_name_map: self.plugin_name_map,
+        }))
     }
 }
diff --git a/chronik/chronik-indexer/src/query/group_history.rs b/chronik/chronik-indexer/src/query/group_history.rs
--- a/chronik/chronik-indexer/src/query/group_history.rs
+++ b/chronik/chronik-indexer/src/query/group_history.rs
@@ -14,6 +14,7 @@
     io::{BlockReader, GroupHistoryReader, SpentByReader, TxNum, TxReader},
     mem::{Mempool, MempoolGroupHistory},
 };
+use chronik_plugin::data::PluginNameMap;
 use chronik_proto::proto;
 use chronik_util::log;
 use thiserror::Error;
@@ -21,7 +22,10 @@
 use crate::{
     avalanche::Avalanche,
     indexer::Node,
-    query::{make_tx_proto, OutputsSpent, TxTokenData},
+    query::{
+        make_tx_proto, read_plugin_outputs, MakeTxProtoParams, OutputsSpent,
+        TxTokenData,
+    },
 };
 
 /// Smallest allowed page size
@@ -48,6 +52,8 @@
     pub node: &'a Node,
     /// Whether the SLP/ALP token index is enabled
     pub is_token_index_enabled: bool,
+    /// Map plugin name <-> plugin idx of all loaded plugins
+    pub plugin_name_map: &'a PluginNameMap,
 }
 
 /// Errors indicating something went wrong with reading txs.
@@ -264,18 +270,28 @@
             .take(request_page_size);
         for (_, txid) in page_mempool_txs_iter {
             let entry = self.mempool.tx(txid).ok_or(MissingMempoolTx(*txid))?;
-            page_txs.push(make_tx_proto(
-                &entry.tx,
-                &OutputsSpent::new_mempool(
+            page_txs.push(make_tx_proto(MakeTxProtoParams {
+                tx: &entry.tx,
+                outputs_spent: &OutputsSpent::new_mempool(
                     self.mempool.spent_by().outputs_spent(txid),
                 ),
-                entry.time_first_seen,
-                false,
-                None,
-                self.avalanche,
-                TxTokenData::from_mempool(self.mempool.tokens(), &entry.tx)
-                    .as_ref(),
-            ));
+                time_first_seen: entry.time_first_seen,
+                is_coinbase: false,
+                block: None,
+                avalanche: self.avalanche,
+                token: TxTokenData::from_mempool(
+                    self.mempool.tokens(),
+                    &entry.tx,
+                )
+                .as_ref(),
+                plugin_outputs: &read_plugin_outputs(
+                    self.db,
+                    &entry.tx,
+                    None,
+                    !self.plugin_name_map.is_empty(),
+                )?,
+                plugin_name_map: self.plugin_name_map,
+            }));
         }
 
         // If we filled up the page with mempool txs, or there's no DB txs on
@@ -348,21 +364,28 @@
                 .map(|(_, txid)| -> Result<_> {
                     let entry =
                         self.mempool.tx(txid).ok_or(MissingMempoolTx(*txid))?;
-                    Ok(make_tx_proto(
-                        &entry.tx,
-                        &OutputsSpent::new_mempool(
+                    Ok(make_tx_proto(MakeTxProtoParams {
+                        tx: &entry.tx,
+                        outputs_spent: &OutputsSpent::new_mempool(
                             self.mempool.spent_by().outputs_spent(txid),
                         ),
-                        entry.time_first_seen,
-                        false,
-                        None,
-                        self.avalanche,
-                        TxTokenData::from_mempool(
+                        time_first_seen: entry.time_first_seen,
+                        is_coinbase: false,
+                        block: None,
+                        avalanche: self.avalanche,
+                        token: TxTokenData::from_mempool(
                             self.mempool.tokens(),
                             &entry.tx,
                         )
                         .as_ref(),
-                    ))
+                        plugin_outputs: &read_plugin_outputs(
+                            self.db,
+                            &entry.tx,
+                            None,
+                            !self.plugin_name_map.is_empty(),
+                        )?,
+                        plugin_name_map: self.plugin_name_map,
+                    }))
                 })
                 .collect::<Result<Vec<_>>>()?,
             None => vec![],
@@ -400,14 +423,22 @@
             &tx,
             self.is_token_index_enabled,
         )?;
-        Ok(make_tx_proto(
+        let plugin_outputs = read_plugin_outputs(
+            self.db,
             &tx,
-            &outputs_spent,
-            block_tx.entry.time_first_seen,
-            block_tx.entry.is_coinbase,
-            Some(&block),
-            self.avalanche,
-            token.as_ref(),
-        ))
+            Some(tx_num),
+            !self.plugin_name_map.is_empty(),
+        )?;
+        Ok(make_tx_proto(MakeTxProtoParams {
+            tx: &tx,
+            outputs_spent: &outputs_spent,
+            time_first_seen: block_tx.entry.time_first_seen,
+            is_coinbase: block_tx.entry.is_coinbase,
+            block: Some(&block),
+            avalanche: self.avalanche,
+            token: token.as_ref(),
+            plugin_outputs: &plugin_outputs,
+            plugin_name_map: self.plugin_name_map,
+        }))
     }
 }
diff --git a/chronik/chronik-indexer/src/query/mod.rs b/chronik/chronik-indexer/src/query/mod.rs
--- a/chronik/chronik-indexer/src/query/mod.rs
+++ b/chronik/chronik-indexer/src/query/mod.rs
@@ -8,6 +8,7 @@
 mod broadcast;
 mod group_history;
 mod group_utxos;
+mod plugins;
 mod tx_token_data;
 mod txs;
 mod util;
@@ -16,6 +17,7 @@
 pub use self::broadcast::*;
 pub use self::group_history::*;
 pub use self::group_utxos::*;
+pub(crate) use self::plugins::*;
 pub use self::tx_token_data::*;
 pub use self::txs::*;
 pub use self::util::*;
diff --git a/chronik/chronik-indexer/src/query/plugins.rs b/chronik/chronik-indexer/src/query/plugins.rs
new file mode 100644
--- /dev/null
+++ b/chronik/chronik-indexer/src/query/plugins.rs
@@ -0,0 +1,69 @@
+// Copyright (c) 2024 The Bitcoin developers
+// Distributed under the MIT software license, see the accompanying
+// file COPYING or http://www.opensource.org/licenses/mit-license.php.
+
+use std::collections::{BTreeMap, HashMap};
+
+use abc_rust_error::Result;
+use bitcoinsuite_core::tx::{OutPoint, Tx};
+use chronik_db::{
+    db::Db,
+    io::{TxNum, TxReader},
+    plugins::PluginsReader,
+};
+use chronik_plugin::data::{PluginNameMap, PluginOutput};
+use chronik_proto::proto;
+
+pub(crate) fn read_plugin_outputs(
+    db: &Db,
+    tx: &Tx,
+    tx_num: Option<TxNum>,
+    has_any_plugins: bool,
+) -> Result<BTreeMap<OutPoint, PluginOutput>> {
+    if !has_any_plugins {
+        return Ok(BTreeMap::new());
+    }
+    let tx_reader = TxReader::new(db)?;
+    let mut outpoints = Vec::new();
+    for input in &tx.inputs {
+        let Some(input_tx_num) =
+            tx_reader.tx_num_by_txid(&input.prev_out.txid)?
+        else {
+            continue;
+        };
+        outpoints.push((input.prev_out, input_tx_num));
+    }
+    let plugin_reader = PluginsReader::new(db)?;
+    if let Some(tx_num) = tx_num {
+        for out_idx in 0..tx.outputs.len() {
+            outpoints.push((
+                OutPoint {
+                    txid: tx.txid(),
+                    out_idx: out_idx as u32,
+                },
+                tx_num,
+            ));
+        }
+    }
+    plugin_reader.plugin_outputs(outpoints)
+}
+
+pub(crate) fn make_plugins_proto(
+    plugin_output: &PluginOutput,
+    plugin_name_map: &PluginNameMap,
+) -> HashMap<String, proto::PluginEntry> {
+    plugin_output
+        .plugins
+        .iter()
+        .filter_map(|(&plugin_idx, entry)| {
+            Some((
+                // Skip plugins that aren't loaded
+                plugin_name_map.name_by_idx(plugin_idx)?.to_string(),
+                proto::PluginEntry {
+                    groups: entry.groups.clone(),
+                    data: entry.data.clone(),
+                },
+            ))
+        })
+        .collect()
+}
diff --git a/chronik/chronik-indexer/src/query/txs.rs b/chronik/chronik-indexer/src/query/txs.rs
--- a/chronik/chronik-indexer/src/query/txs.rs
+++ b/chronik/chronik-indexer/src/query/txs.rs
@@ -14,6 +14,7 @@
     io::{BlockReader, SpentByReader, TxReader},
     mem::Mempool,
 };
+use chronik_plugin::data::PluginNameMap;
 use chronik_proto::proto;
 use thiserror::Error;
 
@@ -22,7 +23,8 @@
     indexer::Node,
     query::{
         make_genesis_info_proto, make_token_type_proto, make_tx_proto,
-        read_token_info, OutputsSpent, QueryTxError::*, TxTokenData,
+        read_plugin_outputs, read_token_info, MakeTxProtoParams, OutputsSpent,
+        QueryTxError::*, TxTokenData,
     },
 };
 
@@ -39,6 +41,8 @@
     pub node: &'a Node,
     /// Whether the SLP/ALP token index is enabled
     pub is_token_index_enabled: bool,
+    /// Map plugin name <-> plugin idx of all loaded plugins
+    pub plugin_name_map: &'a PluginNameMap,
 }
 
 /// Errors indicating something went wrong with reading txs.
@@ -65,18 +69,25 @@
     /// Query a tx by txid from the mempool or DB.
     pub fn tx_by_id(&self, txid: TxId) -> Result<proto::Tx> {
         match self.mempool.tx(&txid) {
-            Some(tx) => Ok(make_tx_proto(
-                &tx.tx,
-                &OutputsSpent::new_mempool(
+            Some(tx) => Ok(make_tx_proto(MakeTxProtoParams {
+                tx: &tx.tx,
+                outputs_spent: &OutputsSpent::new_mempool(
                     self.mempool.spent_by().outputs_spent(&txid),
                 ),
-                tx.time_first_seen,
-                false,
-                None,
-                self.avalanche,
-                TxTokenData::from_mempool(self.mempool.tokens(), &tx.tx)
+                time_first_seen: tx.time_first_seen,
+                is_coinbase: false,
+                block: None,
+                avalanche: self.avalanche,
+                token: TxTokenData::from_mempool(self.mempool.tokens(), &tx.tx)
                     .as_ref(),
-            )),
+                plugin_outputs: &read_plugin_outputs(
+                    self.db,
+                    &tx.tx,
+                    None,
+                    !self.plugin_name_map.is_empty(),
+                )?,
+                plugin_name_map: self.plugin_name_map,
+            })),
             None => {
                 let tx_reader = TxReader::new(self.db)?;
                 let (tx_num, block_tx) = tx_reader
@@ -110,15 +121,22 @@
                     &tx,
                     self.is_token_index_enabled,
                 )?;
-                Ok(make_tx_proto(
-                    &tx,
-                    &outputs_spent,
-                    tx_entry.time_first_seen,
-                    tx_entry.is_coinbase,
-                    Some(&block),
-                    self.avalanche,
-                    token.as_ref(),
-                ))
+                Ok(make_tx_proto(MakeTxProtoParams {
+                    tx: &tx,
+                    outputs_spent: &outputs_spent,
+                    time_first_seen: tx_entry.time_first_seen,
+                    is_coinbase: tx_entry.is_coinbase,
+                    block: Some(&block),
+                    avalanche: self.avalanche,
+                    token: token.as_ref(),
+                    plugin_outputs: &read_plugin_outputs(
+                        self.db,
+                        &tx,
+                        Some(tx_num),
+                        !self.plugin_name_map.is_empty(),
+                    )?,
+                    plugin_name_map: self.plugin_name_map,
+                }))
             }
         }
     }
diff --git a/chronik/chronik-indexer/src/query/util.rs b/chronik/chronik-indexer/src/query/util.rs
--- a/chronik/chronik-indexer/src/query/util.rs
+++ b/chronik/chronik-indexer/src/query/util.rs
@@ -16,12 +16,13 @@
 use chronik_db::io::{
     BlockHeight, DbBlock, SpentByEntry, SpentByReader, TxNum, TxReader,
 };
+use chronik_plugin::data::{PluginNameMap, PluginOutput};
 use chronik_proto::proto;
 use thiserror::Error;
 
 use crate::{
     avalanche::Avalanche,
-    query::{QueryUtilError::*, TxTokenData},
+    query::{make_plugins_proto, QueryUtilError::*, TxTokenData},
 };
 
 /// Errors indicating something went wrong with reading txs.
@@ -44,16 +45,21 @@
     NotHashOrHeight(String),
 }
 
+pub(crate) struct MakeTxProtoParams<'a> {
+    pub(crate) tx: &'a Tx,
+    pub(crate) outputs_spent: &'a OutputsSpent<'a>,
+    pub(crate) time_first_seen: i64,
+    pub(crate) is_coinbase: bool,
+    pub(crate) block: Option<&'a DbBlock>,
+    pub(crate) avalanche: &'a Avalanche,
+    pub(crate) token: Option<&'a TxTokenData<'a>>,
+    pub(crate) plugin_outputs: &'a BTreeMap<OutPoint, PluginOutput>,
+    pub(crate) plugin_name_map: &'a PluginNameMap,
+}
+
 /// Make a [`proto::Tx`].
-pub(crate) fn make_tx_proto(
-    tx: &Tx,
-    outputs_spent: &OutputsSpent<'_>,
-    time_first_seen: i64,
-    is_coinbase: bool,
-    block: Option<&DbBlock>,
-    avalanche: &Avalanche,
-    token: Option<&TxTokenData<'_>>,
-) -> proto::Tx {
+pub(crate) fn make_tx_proto(params: MakeTxProtoParams<'_>) -> proto::Tx {
+    let tx = params.tx;
     proto::Tx {
         txid: tx.txid().to_vec(),
         version: tx.version,
@@ -74,8 +80,19 @@
                     output_script,
                     value,
                     sequence_no: input.sequence,
-                    token: token
+                    token: params
+                        .token
                         .and_then(|token| token.input_token_proto(input_idx)),
+                    plugins: params
+                        .plugin_outputs
+                        .get(&input.prev_out)
+                        .map(|plugin_output| {
+                            make_plugins_proto(
+                                plugin_output,
+                                params.plugin_name_map,
+                            )
+                        })
+                        .unwrap_or_default(),
                 }
             })
             .collect(),
@@ -86,22 +103,39 @@
             .map(|(output_idx, output)| proto::TxOutput {
                 value: output.value,
                 output_script: output.script.to_vec(),
-                spent_by: outputs_spent
+                spent_by: params
+                    .outputs_spent
                     .spent_by(output_idx as u32)
                     .map(|spent_by| make_spent_by_proto(&spent_by)),
-                token: token
+                token: params
+                    .token
                     .and_then(|token| token.output_token_proto(output_idx)),
+                plugins: params
+                    .plugin_outputs
+                    .get(&OutPoint {
+                        txid: tx.txid(),
+                        out_idx: output_idx as u32,
+                    })
+                    .map(|plugin_output| {
+                        make_plugins_proto(
+                            plugin_output,
+                            params.plugin_name_map,
+                        )
+                    })
+                    .unwrap_or_default(),
             })
             .collect(),
         lock_time: tx.locktime,
-        block: block.map(|block| proto::BlockMetadata {
+        block: params.block.map(|block| proto::BlockMetadata {
             hash: block.hash.to_vec(),
             height: block.height,
             timestamp: block.timestamp,
-            is_final: avalanche.is_final_height(block.height),
+            is_final: params.avalanche.is_final_height(block.height),
         }),
-        token_entries: token.map_or(vec![], |token| token.entries_proto()),
-        token_failed_parsings: token.map_or(vec![], |token| {
+        token_entries: params
+            .token
+            .map_or(vec![], |token| token.entries_proto()),
+        token_failed_parsings: params.token.map_or(vec![], |token| {
             token
                 .tx
                 .failed_parsings
@@ -115,7 +149,7 @@
                 })
                 .collect()
         }),
-        token_status: match token {
+        token_status: match params.token {
             Some(token) => {
                 if token.tx.failed_parsings.is_empty()
                     && token.tx.entries.iter().all(|entry| entry.is_normal())
@@ -127,9 +161,9 @@
             }
             None => proto::TokenStatus::NonToken as _,
         },
-        time_first_seen,
+        time_first_seen: params.time_first_seen,
         size: tx.ser_len() as u32,
-        is_coinbase,
+        is_coinbase: params.is_coinbase,
     }
 }
 
diff --git a/chronik/chronik-plugin-common/src/data.rs b/chronik/chronik-plugin-common/src/data.rs
--- a/chronik/chronik-plugin-common/src/data.rs
+++ b/chronik/chronik-plugin-common/src/data.rs
@@ -60,4 +60,9 @@
     pub fn idx_by_name(&self, name: &str) -> Option<PluginIdx> {
         self.plugins_name.get_by_right(name).copied()
     }
+
+    /// Whether the map contains no plugins at all
+    pub fn is_empty(&self) -> bool {
+        self.plugins_name.is_empty()
+    }
 }
diff --git a/chronik/chronik-proto/proto/chronik.proto b/chronik/chronik-proto/proto/chronik.proto
--- a/chronik/chronik-proto/proto/chronik.proto
+++ b/chronik/chronik-proto/proto/chronik.proto
@@ -157,6 +157,8 @@
     uint32 sequence_no = 5;
     // Token value attached to this input
     Token token = 8;
+    // Data attached by plugins to the output spent by this input
+    map<string, PluginEntry> plugins = 9;
 }
 
 // CTxOut, creates a new coin.
@@ -169,6 +171,8 @@
     SpentBy spent_by = 4;
     // Token value attached to this output
     Token token = 5;
+    // Data attached to this output by plugins
+    map<string, PluginEntry> plugins = 6;
 }
 
 // Data about a block which a Tx is in.
@@ -338,6 +342,14 @@
     string error = 3;
 }
 
+// Data attached by a plugin to an output
+message PluginEntry {
+    // Groups assigned to this output
+    repeated bytes groups = 1;
+    // Data assigned to the output
+    repeated bytes data = 2;
+}
+
 // Page with txs
 message TxHistoryPage {
     // Txs of the page
diff --git a/test/functional/chronik_plugins.py b/test/functional/chronik_plugins.py
new file mode 100644
--- /dev/null
+++ b/test/functional/chronik_plugins.py
@@ -0,0 +1,229 @@
+# Copyright (c) 2024 The Bitcoin developers
+# Distributed under the MIT software license, see the accompanying
+# file COPYING or http://www.opensource.org/licenses/mit-license.php.
+"""
+Test Chronik runs plugins on txs.
+"""
+
+import os
+
+from test_framework.address import (
+    ADDRESS_ECREG_P2SH_OP_TRUE,
+    ADDRESS_ECREG_UNSPENDABLE,
+    P2SH_OP_TRUE,
+    SCRIPTSIG_OP_TRUE,
+)
+from test_framework.blocktools import COINBASE_MATURITY
+from test_framework.messages import COutPoint, CTransaction, CTxIn, CTxOut
+from test_framework.script import OP_RETURN, CScript
+from test_framework.test_framework import BitcoinTestFramework
+from test_framework.txtools import pad_tx
+from test_framework.util import assert_equal
+
+
+class ChronikPlugins(BitcoinTestFramework):
+    def set_test_params(self):
+        self.setup_clean_chain = True
+        self.num_nodes = 1
+        self.extra_args = [["-chronik"]]
+
+    def skip_test_if_missing_module(self):
+        self.skip_if_no_chronik_plugins()
+
+    def run_test(self):
+        from test_framework.chronik.client import pb
+
+        node = self.nodes[0]
+        chronik = node.get_chronik_client()
+
+        def assert_start_raises(*args, **kwargs):
+            node.assert_start_raises_init_error(["-chronik"], *args, **kwargs)
+
+        # Without a plugins.toml, setting up a plugin context is skipped
+        plugins_toml = os.path.join(node.datadir, "plugins.toml")
+        plugins_dir = os.path.join(node.datadir, "plugins")
+
+        # Plugin that colors outputs with the corresponding PUSHDATA of the OP_RETURN,
+        # concatenated with the existing plugin data of the corresponding input
+        with open(plugins_toml, "w", encoding="utf-8") as f:
+            print("[regtest.plugin.my_plugin]", file=f)
+        os.mkdir(plugins_dir)
+        plugin_module = os.path.join(plugins_dir, "my_plugin.py")
+        with open(plugin_module, "w", encoding="utf-8") as f:
+            print(
+                """
+from chronik_plugin.plugin import Plugin, PluginOutput
+
+class MyPluginPlugin(Plugin):
+    def lokad_id(self):
+        return b'TEST'
+
+    def version(self):
+        return '0.1.0'
+
+    def run(self, tx):
+        ops = tx.outputs[0].script.ops()
+        if ops[0].opcode != 0x6a: # OP_RETURN
+            return []
+        if ops[1].pushdata != b'TEST':
+            return []
+        outputs = []
+        for idx, (op, _) in enumerate(zip(ops[2:], tx.outputs[1:])):
+            data = [op.pushdata]
+            if idx < len(tx.inputs):
+                tx_input = tx.inputs[idx]
+                if 'my_plugin' in tx_input.plugin:
+                    data += tx_input.plugin['my_plugin'].data
+            outputs.append(
+                PluginOutput(idx=idx + 1, data=data, group=[])
+            )
+        return outputs
+""",
+                file=f,
+            )
+
+        with node.assert_debug_log(
+            [
+                "Plugin context initialized Python",
+                'Loaded plugin my_plugin.MyPluginPlugin (version 0.1.0) with LOKAD IDs [b"TEST"]',
+            ]
+        ):
+            self.restart_node(0, ["-chronik", "-chronikreindex"])
+
+        coinblockhash = self.generatetoaddress(node, 1, ADDRESS_ECREG_P2SH_OP_TRUE)[0]
+        coinblock = node.getblock(coinblockhash)
+        cointx = coinblock["tx"][0]
+
+        self.generatetoaddress(node, COINBASE_MATURITY, ADDRESS_ECREG_UNSPENDABLE)
+
+        coinvalue = 5000000000
+        tx1 = CTransaction()
+        tx1.vin = [CTxIn(COutPoint(int(cointx, 16), 0), SCRIPTSIG_OP_TRUE)]
+        tx1.vout = [
+            CTxOut(0, CScript([OP_RETURN, b"TEST", b"argo", b"alef", b"abc"])),
+            CTxOut(1000, P2SH_OP_TRUE),
+            CTxOut(1000, P2SH_OP_TRUE),
+            CTxOut(coinvalue - 10000, P2SH_OP_TRUE),
+        ]
+        pad_tx(tx1)
+        node.sendrawtransaction(tx1.serialize().hex())
+
+        # Plugins are currently not indexed for mempool txs
+        proto_tx1 = chronik.tx(tx1.hash).ok()
+        assert_equal([inpt.plugins for inpt in proto_tx1.inputs], [{}])
+        assert_equal([output.plugins for output in proto_tx1.outputs], [{}, {}, {}, {}])
+
+        tx2 = CTransaction()
+        tx2.vin = [CTxIn(COutPoint(tx1.sha256, 3), SCRIPTSIG_OP_TRUE)]
+        tx2.vout = [
+            CTxOut(0, CScript([OP_RETURN, b"TEST", b"blub", b"borg", b"bjork"])),
+            CTxOut(1000, P2SH_OP_TRUE),
+            CTxOut(1000, P2SH_OP_TRUE),
+            CTxOut(coinvalue - 20000, P2SH_OP_TRUE),
+        ]
+        pad_tx(tx2)
+        node.sendrawtransaction(tx2.serialize().hex())
+
+        # Plugins are currently not indexed for mempool txs
+        proto_tx2 = chronik.tx(tx2.hash).ok()
+        assert_equal([inpt.plugins for inpt in proto_tx2.inputs], [{}])
+        assert_equal([output.plugins for output in proto_tx2.outputs], [{}, {}, {}, {}])
+
+        # Mine tx1 and tx2
+        block1 = self.generatetoaddress(node, 1, ADDRESS_ECREG_UNSPENDABLE)[-1]
+
+        proto_tx1 = chronik.tx(tx1.hash).ok()
+        assert_equal([inpt.plugins for inpt in proto_tx1.inputs], [{}])
+        assert_equal(
+            [output.plugins for output in proto_tx1.outputs],
+            [
+                {},
+                {"my_plugin": pb.PluginEntry(data=[b"argo"])},
+                {"my_plugin": pb.PluginEntry(data=[b"alef"])},
+                {"my_plugin": pb.PluginEntry(data=[b"abc"])},
+            ],
+        )
+
+        proto_tx2 = chronik.tx(tx2.hash).ok()
+        assert_equal(
+            [inpt.plugins for inpt in proto_tx2.inputs],
+            [{"my_plugin": pb.PluginEntry(data=[b"abc"])}],
+        )
+        assert_equal(
+            [output.plugins for output in proto_tx2.outputs],
+            [
+                {},
+                {"my_plugin": pb.PluginEntry(data=[b"blub", b"abc"])},
+                {"my_plugin": pb.PluginEntry(data=[b"borg"])},
+                {"my_plugin": pb.PluginEntry(data=[b"bjork"])},
+            ],
+        )
+
+        tx3 = CTransaction()
+        tx3.vin = [
+            CTxIn(COutPoint(tx2.sha256, 1), SCRIPTSIG_OP_TRUE),
+            CTxIn(COutPoint(tx2.sha256, 3), SCRIPTSIG_OP_TRUE),
+        ]
+        tx3.vout = [
+            CTxOut(0, CScript([OP_RETURN, b"TEST", b"carp"])),
+            CTxOut(coinvalue - 30000, P2SH_OP_TRUE),
+        ]
+        pad_tx(tx3)
+        node.sendrawtransaction(tx3.serialize().hex())
+
+        # Mined outputs show up even when a mempool tx spends them
+        proto_tx3 = chronik.tx(tx3.hash).ok()
+        assert_equal(
+            [inpt.plugins for inpt in proto_tx3.inputs],
+            [
+                {"my_plugin": pb.PluginEntry(data=[b"blub", b"abc"])},
+                {"my_plugin": pb.PluginEntry(data=[b"bjork"])},
+            ],
+        )
+        assert_equal([output.plugins for output in proto_tx3.outputs], [{}, {}])
+
+        # Mine tx3
+        block2 = self.generatetoaddress(node, 1, ADDRESS_ECREG_UNSPENDABLE)[-1]
+
+        proto_tx3 = chronik.tx(tx3.hash).ok()
+        assert_equal(
+            [inpt.plugins for inpt in proto_tx3.inputs],
+            [
+                {"my_plugin": pb.PluginEntry(data=[b"blub", b"abc"])},
+                {"my_plugin": pb.PluginEntry(data=[b"bjork"])},
+            ],
+        )
+        assert_equal(
+            [output.plugins for output in proto_tx3.outputs],
+            [{}, {"my_plugin": pb.PluginEntry(data=[b"carp", b"blub", b"abc"])}],
+        )
+
+        # Disconnect block2, outputs are now gone (inputs remain)
+        node.invalidateblock(block2)
+        proto_tx3 = chronik.tx(tx3.hash).ok()
+        assert_equal(
+            [inpt.plugins for inpt in proto_tx3.inputs],
+            [
+                {"my_plugin": pb.PluginEntry(data=[b"blub", b"abc"])},
+                {"my_plugin": pb.PluginEntry(data=[b"bjork"])},
+            ],
+        )
+        assert_equal([output.plugins for output in proto_tx3.outputs], [{}, {}])
+
+        # Disconnect block1, all outputs and inputs empty now
+        node.invalidateblock(block1)
+        proto_tx1 = chronik.tx(tx1.hash).ok()
+        assert_equal([inpt.plugins for inpt in proto_tx1.inputs], [{}])
+        assert_equal([output.plugins for output in proto_tx1.outputs], [{}, {}, {}, {}])
+
+        proto_tx2 = chronik.tx(tx2.hash).ok()
+        assert_equal([inpt.plugins for inpt in proto_tx2.inputs], [{}])
+        assert_equal([output.plugins for output in proto_tx2.outputs], [{}, {}, {}, {}])
+
+        proto_tx3 = chronik.tx(tx3.hash).ok()
+        assert_equal([inpt.plugins for inpt in proto_tx3.inputs], [{}, {}])
+        assert_equal([output.plugins for output in proto_tx3.outputs], [{}, {}])
+
+
+if __name__ == "__main__":
+    ChronikPlugins().main()