Page MenuHomePhabricator

D17650.id52583.diff
No OneTemporary

D17650.id52583.diff

This file is larger than 256 KB, so syntax highlighting was skipped.
diff --git a/chronik/bitcoinsuite-core/src/tx/tx.rs b/chronik/bitcoinsuite-core/src/tx/tx.rs
--- a/chronik/bitcoinsuite-core/src/tx/tx.rs
+++ b/chronik/bitcoinsuite-core/src/tx/tx.rs
@@ -95,7 +95,7 @@
#[derive(Clone, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct TxOutput {
/// Value of the output.
- pub value: i64,
+ pub sats: i64,
/// Script locking the output.
pub script: Script,
}
@@ -238,13 +238,13 @@
impl BitcoinSer for TxOutput {
fn ser_to<S: BitcoinSerializer>(&self, bytes: &mut S) {
- self.value.ser_to(bytes);
+ self.sats.ser_to(bytes);
self.script.ser_to(bytes);
}
fn deser(data: &mut bytes::Bytes) -> Result<Self, DataError> {
Ok(TxOutput {
- value: BitcoinSer::deser(data)?,
+ sats: BitcoinSer::deser(data)?,
script: BitcoinSer::deser(data)?,
})
}
@@ -304,7 +304,7 @@
coin: None,
}],
outputs: vec![TxOutput {
- value: 5000000000,
+ sats: 5000000000,
script: Script::new(
hex::decode(
"4104678afdb0fe5548271967f1a67130b7105cd6a828e03909a679\
diff --git a/chronik/bitcoinsuite-slp/src/alp/build.rs b/chronik/bitcoinsuite-slp/src/alp/build.rs
--- a/chronik/bitcoinsuite-slp/src/alp/build.rs
+++ b/chronik/bitcoinsuite-slp/src/alp/build.rs
@@ -11,7 +11,7 @@
alp::consts::ALP_LOKAD_ID,
consts::{BURN, GENESIS, MINT, SEND},
parsed::ParsedMintData,
- structs::{Amount, GenesisInfo},
+ structs::{Atoms, GenesisInfo},
token_id::TokenId,
token_type::AlpTokenType,
};
@@ -81,7 +81,7 @@
pub fn burn_section(
token_id: &TokenId,
token_type: AlpTokenType,
- amount: Amount,
+ atoms: Atoms,
) -> Bytes {
let mut section = BytesMut::new();
section.put_slice(&ALP_LOKAD_ID);
@@ -89,15 +89,15 @@
section.put_slice(&[BURN.len() as u8]);
section.put_slice(BURN);
section.put_slice(token_id.txid().hash().as_le_bytes());
- put_amount(&mut section, amount);
+ put_atoms(&mut section, atoms);
section.freeze()
}
/// Build an ALP SEND pushdata section
-pub fn send_section<I: ExactSizeIterator<Item = Amount>>(
+pub fn send_section<I: ExactSizeIterator<Item = Atoms>>(
token_id: &TokenId,
token_type: AlpTokenType,
- send_amounts: impl IntoIterator<Item = Amount, IntoIter = I>,
+ send_amounts: impl IntoIterator<Item = Atoms, IntoIter = I>,
) -> Bytes {
let mut section = BytesMut::new();
section.put_slice(&ALP_LOKAD_ID);
@@ -109,19 +109,19 @@
let send_amounts = send_amounts.into_iter();
section.put_slice(&[send_amounts.len() as u8]);
for send_amount in send_amounts {
- put_amount(&mut section, send_amount);
+ put_atoms(&mut section, send_amount);
}
section.freeze()
}
fn put_mint_data(section: &mut BytesMut, mint_data: &ParsedMintData) {
section.put_slice(&[mint_data.amounts.len() as u8]);
- for &amount in &mint_data.amounts {
- put_amount(section, amount);
+ for &atoms in &mint_data.amounts {
+ put_atoms(section, atoms);
}
section.put_slice(&[mint_data.num_batons as u8]);
}
-fn put_amount(section: &mut BytesMut, amount: Amount) {
- section.put_slice(&amount.to_le_bytes()[..6]);
+fn put_atoms(section: &mut BytesMut, atoms: Atoms) {
+ section.put_slice(&atoms.to_le_bytes()[..6]);
}
diff --git a/chronik/bitcoinsuite-slp/src/alp/parse.rs b/chronik/bitcoinsuite-slp/src/alp/parse.rs
--- a/chronik/bitcoinsuite-slp/src/alp/parse.rs
+++ b/chronik/bitcoinsuite-slp/src/alp/parse.rs
@@ -19,7 +19,7 @@
lokad_id::LokadId,
parsed::{ParsedData, ParsedGenesis, ParsedMintData, ParsedTxType},
slp::consts::SLP_LOKAD_ID,
- structs::{Amount, GenesisInfo, TokenMeta},
+ structs::{Atoms, GenesisInfo, TokenMeta},
token_id::TokenId,
token_type::{AlpTokenType, TokenType},
};
@@ -283,14 +283,14 @@
Ok(size.into())
}
-fn read_amount(pushdata: &mut Bytes) -> Result<Amount, ParseError> {
+fn read_amount(pushdata: &mut Bytes) -> Result<Atoms, ParseError> {
let amount6: [u8; 6] = read_array(pushdata)?;
let mut amount = [0u8; 8];
amount[..6].copy_from_slice(&amount6);
- Ok(Amount::from_le_bytes(amount))
+ Ok(Atoms::from_le_bytes(amount))
}
-fn read_amounts(pushdata: &mut Bytes) -> Result<Vec<Amount>, ParseError> {
+fn read_amounts(pushdata: &mut Bytes) -> Result<Vec<Atoms>, ParseError> {
let size = read_size(pushdata)?;
let mut amounts = Vec::with_capacity(size);
for _ in 0..size {
diff --git a/chronik/bitcoinsuite-slp/src/burn_summary.rs b/chronik/bitcoinsuite-slp/src/burn_summary.rs
--- a/chronik/bitcoinsuite-slp/src/burn_summary.rs
+++ b/chronik/bitcoinsuite-slp/src/burn_summary.rs
@@ -13,10 +13,10 @@
// Burning MINT batons can never be intentional
return true;
}
- if let Some(intentional_burn_amount) = self.intentional_burn_amount {
- intentional_burn_amount as u128 != self.actual_burn_amount
+ if let Some(intentional_burn_atoms) = self.intentional_burn_atoms {
+ intentional_burn_atoms as u128 != self.actual_burn_atoms
} else {
- self.actual_burn_amount > 0
+ self.actual_burn_atoms > 0
}
}
@@ -35,16 +35,14 @@
/// Create a human-readable summary of the burns of this entry.
pub fn burn_summary(&self) -> String {
if self.is_normal() {
- if let Some(burn_amount) = self.intentional_burn_amount {
- return format!(
- "OK: Intentional burn of {burn_amount} base tokens"
- );
+ if let Some(burn_atoms) = self.intentional_burn_atoms {
+ return format!("OK: Intentional burn of {burn_atoms} atoms");
} else {
return "OK: No burn".to_string();
}
}
let any_actual_burn =
- self.burns_mint_batons || self.actual_burn_amount != 0;
+ self.burns_mint_batons || self.actual_burn_atoms != 0;
if !any_actual_burn
&& (self.burn_error.is_some() || !self.failed_colorings.is_empty())
{
@@ -67,44 +65,38 @@
let mut s = "Unexpected burn: ".to_string();
if self.burns_mint_batons {
s.push_str("Burns mint baton(s)");
- if self.actual_burn_amount > 0 {
- s.push_str(&format!(
- " and {} base tokens",
- self.actual_burn_amount,
- ));
+ if self.actual_burn_atoms > 0 {
+ s.push_str(&format!(" and {} atoms", self.actual_burn_atoms,));
}
- } else if self.actual_burn_amount > 0 {
- s.push_str(&format!(
- "Burns {} base tokens",
- self.actual_burn_amount,
- ));
+ } else if self.actual_burn_atoms > 0 {
+ s.push_str(&format!("Burns {} atoms", self.actual_burn_atoms,));
}
- if let Some(intentional_burn_amount) = self.intentional_burn_amount {
- if self.actual_burn_amount > 0 {
+ if let Some(intentional_burn_atoms) = self.intentional_burn_atoms {
+ if self.actual_burn_atoms > 0 {
s.push_str(&format!(
- ", but intended to burn {intentional_burn_amount}"
+ ", but intended to burn {intentional_burn_atoms}"
));
- let intentional_burn_amount = intentional_burn_amount as u128;
- if intentional_burn_amount > self.actual_burn_amount {
+ let intentional_burn_atoms = intentional_burn_atoms as u128;
+ if intentional_burn_atoms > self.actual_burn_atoms {
s.push_str(&format!(
"; burned {} too few",
- intentional_burn_amount - self.actual_burn_amount
+ intentional_burn_atoms - self.actual_burn_atoms
));
} else {
s.push_str(&format!(
"; burned {} too many",
- self.actual_burn_amount - intentional_burn_amount
+ self.actual_burn_atoms - intentional_burn_atoms
));
}
} else if self.burns_mint_batons {
s.push_str(&format!(
- "; expected {intentional_burn_amount} base tokens to be \
- burned instead"
+ "; expected {intentional_burn_atoms} atoms to be burned \
+ instead"
));
} else {
s.push_str(&format!(
- "Expected {intentional_burn_amount} base tokens to be \
- burned, but none found"
+ "Expected {intentional_burn_atoms} atoms to be burned, \
+ but none found"
));
}
}
@@ -133,7 +125,7 @@
use crate::{
alp::{burn_section, sections_opreturn, send_section},
test_helpers::{
- empty_entry, meta_alp, spent_amount, spent_baton, verify, TOKEN_ID1,
+ empty_entry, meta_alp, spent_atoms, spent_baton, verify, TOKEN_ID1,
},
token_tx::TokenTxEntry,
token_type::AlpTokenType::*,
@@ -144,12 +136,12 @@
assert_str_eq!(empty_entry().burn_summary(), "OK: No burn");
assert_str_eq!(
TokenTxEntry {
- actual_burn_amount: 1234,
- intentional_burn_amount: Some(1234),
+ actual_burn_atoms: 1234,
+ intentional_burn_atoms: Some(1234),
..empty_entry()
}
.burn_summary(),
- "OK: Intentional burn of 1234 base tokens",
+ "OK: Intentional burn of 1234 atoms",
);
}
@@ -185,12 +177,12 @@
);
assert_str_eq!(
TokenTxEntry {
- actual_burn_amount: 1234,
+ actual_burn_atoms: 1234,
burns_mint_batons: true,
..empty_entry()
}
.burn_summary(),
- "Unexpected burn: Burns mint baton(s) and 1234 base tokens",
+ "Unexpected burn: Burns mint baton(s) and 1234 atoms",
);
}
@@ -198,11 +190,11 @@
fn test_burn_summary_burns_tokens() {
assert_str_eq!(
TokenTxEntry {
- actual_burn_amount: 1234,
+ actual_burn_atoms: 1234,
..empty_entry()
}
.burn_summary(),
- "Unexpected burn: Burns 1234 base tokens",
+ "Unexpected burn: Burns 1234 atoms",
);
}
@@ -210,52 +202,51 @@
fn test_burn_summary_wrong_intentional_burn() {
assert_str_eq!(
TokenTxEntry {
- actual_burn_amount: 1000,
- intentional_burn_amount: Some(3000),
+ actual_burn_atoms: 1000,
+ intentional_burn_atoms: Some(3000),
..empty_entry()
}
.burn_summary(),
- "Unexpected burn: Burns 1000 base tokens, but intended to burn \
- 3000; burned 2000 too few",
+ "Unexpected burn: Burns 1000 atoms, but intended to burn 3000; \
+ burned 2000 too few",
);
assert_str_eq!(
TokenTxEntry {
- actual_burn_amount: 3000,
- intentional_burn_amount: Some(1000),
+ actual_burn_atoms: 3000,
+ intentional_burn_atoms: Some(1000),
..empty_entry()
}
.burn_summary(),
- "Unexpected burn: Burns 3000 base tokens, but intended to burn \
- 1000; burned 2000 too many",
+ "Unexpected burn: Burns 3000 atoms, but intended to burn 1000; \
+ burned 2000 too many",
);
assert_str_eq!(
TokenTxEntry {
- intentional_burn_amount: Some(1000),
+ intentional_burn_atoms: Some(1000),
..empty_entry()
}
.burn_summary(),
- "Unexpected burn: Expected 1000 base tokens to be burned, but \
- none found",
+ "Unexpected burn: Expected 1000 atoms to be burned, but none found",
);
assert_str_eq!(
TokenTxEntry {
- intentional_burn_amount: Some(1000),
+ intentional_burn_atoms: Some(1000),
burns_mint_batons: true,
..empty_entry()
}
.burn_summary(),
- "Unexpected burn: Burns mint baton(s); expected 1000 base tokens \
- to be burned instead",
+ "Unexpected burn: Burns mint baton(s); expected 1000 atoms to be \
+ burned instead",
);
assert_str_eq!(
TokenTxEntry {
- actual_burn_amount: 3000,
- intentional_burn_amount: Some(1000),
+ actual_burn_atoms: 3000,
+ intentional_burn_atoms: Some(1000),
burns_mint_batons: true,
..empty_entry()
}
.burn_summary(),
- "Unexpected burn: Burns mint baton(s) and 3000 base tokens, but \
+ "Unexpected burn: Burns mint baton(s) and 3000 atoms, but \
intended to burn 1000; burned 2000 too many",
);
}
@@ -269,14 +260,14 @@
send_section(&TOKEN_ID1, Standard, [1, 2, 3, 4, 5]),
send_section(&TOKEN_ID1, Standard, [1, 2]),
]),
- &[spent_amount(meta_alp(TOKEN_ID1), 2)],
+ &[spent_atoms(meta_alp(TOKEN_ID1), 2)],
)
.entries[0]
.burn_summary(),
- "Unexpected burn: Burns 2 base tokens. Reason(s): Invalid \
- coloring at pushdata idx 0: Too few outputs, expected 4 but got \
- 3. Invalid coloring at pushdata idx 1: Too few outputs, expected \
- 6 but got 3. Insufficient token input output sum: 2 < 3",
+ "Unexpected burn: Burns 2 atoms. Reason(s): Invalid coloring at \
+ pushdata idx 0: Too few outputs, expected 4 but got 3. Invalid \
+ coloring at pushdata idx 1: Too few outputs, expected 6 but got \
+ 3. Insufficient token input output sum: 2 < 3",
);
assert_str_eq!(
verify::<2>(
@@ -286,16 +277,16 @@
burn_section(&TOKEN_ID1, Standard, 5),
]),
&[
- spent_amount(meta_alp(TOKEN_ID1), 2),
+ spent_atoms(meta_alp(TOKEN_ID1), 2),
spent_baton(meta_alp(TOKEN_ID1)),
],
)
.entries[0]
.burn_summary(),
- "Unexpected burn: Burns mint baton(s) and 2 base tokens, but \
- intended to burn 5; burned 3 too few. Reason(s): Invalid \
- coloring at pushdata idx 0: Too few outputs, expected 4 but got \
- 3. Insufficient token input output sum: 2 < 3",
+ "Unexpected burn: Burns mint baton(s) and 2 atoms, but intended \
+ to burn 5; burned 3 too few. Reason(s): Invalid coloring at \
+ pushdata idx 0: Too few outputs, expected 4 but got 3. \
+ Insufficient token input output sum: 2 < 3",
);
}
}
diff --git a/chronik/bitcoinsuite-slp/src/color.rs b/chronik/bitcoinsuite-slp/src/color.rs
--- a/chronik/bitcoinsuite-slp/src/color.rs
+++ b/chronik/bitcoinsuite-slp/src/color.rs
@@ -15,8 +15,7 @@
parsed::{ParsedData, ParsedGenesis, ParsedMintData, ParsedTxType},
slp,
structs::{
- Amount, GenesisInfo, Token, TokenMeta, TokenOutput, TokenVariant,
- TxType,
+ Atoms, GenesisInfo, Token, TokenMeta, TokenOutput, TokenVariant, TxType,
},
token_id::TokenId,
token_type::{SlpTokenType, TokenType},
@@ -103,7 +102,7 @@
/// Which token meta should be burned
pub meta: TokenMeta,
/// How many tokens should be burned
- pub amount: Amount,
+ pub atoms: Atoms,
}
/// Error when trying to color a parsed section.
@@ -164,16 +163,16 @@
/// Outputs cannot be colored twice by different sections
#[error(
- "Overlapping amount when trying to color {amount} at index \
+ "Overlapping atoms when trying to color {atoms} at index \
{output_idx}, output is already colored with {prev_token}"
)]
- OverlappingAmount {
+ OverlappingAtoms {
/// Previous token the output is already colored with
prev_token: Token,
/// Index of the output that we tried to color twice
output_idx: usize,
/// Amount that tried to color an output twice
- amount: Amount,
+ atoms: Atoms,
},
/// Outputs cannot be colored twice by different sections
@@ -328,7 +327,7 @@
}
ParsedTxType::Mint(mint) => self.color_mint(meta, mint),
ParsedTxType::Send(send) => self.color_send(meta, send),
- ParsedTxType::Burn(amount) => self.color_burn(meta, amount),
+ ParsedTxType::Burn(burn) => self.color_burn(meta, burn),
ParsedTxType::Unknown => self.color_unknown(meta),
}
}
@@ -384,16 +383,16 @@
let mut out_of_range_idx = None;
// Verify no outputs have been colored already
- for (output_idx, &amount) in
+ for (output_idx, &atoms) in
mint_data.amounts_range().zip(&mint_data.amounts)
{
- if amount != 0 {
+ if atoms != 0 {
match self.outputs.get(output_idx) {
Some(Some(token)) => {
- return Err(OverlappingAmount {
+ return Err(OverlappingAtoms {
prev_token: self.token(token),
output_idx,
- amount,
+ atoms,
});
}
Some(None) => {}
@@ -425,16 +424,16 @@
}
// Now, color all outputs
- for (output_idx, &amount) in
+ for (output_idx, &atoms) in
mint_data.amounts_range().zip(&mint_data.amounts)
{
if output_idx >= self.outputs.len() {
break;
}
- if amount > 0 {
+ if atoms > 0 {
self.outputs[output_idx] = Some(TokenOutput {
token_idx,
- variant: TokenVariant::Amount(amount),
+ variant: TokenVariant::Atoms(atoms),
});
}
}
@@ -454,18 +453,18 @@
fn color_send(
&mut self,
meta: TokenMeta,
- amounts: Vec<Amount>,
+ atoms: Vec<Atoms>,
) -> Result<(), ColorError> {
// Verify no outputs have been colored already
let mut out_of_range_idx = None;
- for (idx, &amount) in amounts.iter().enumerate() {
- if amount != 0 {
+ for (idx, &atoms) in atoms.iter().enumerate() {
+ if atoms != 0 {
match self.outputs.get(idx + 1) {
Some(Some(token)) => {
- return Err(OverlappingAmount {
+ return Err(OverlappingAtoms {
prev_token: self.token(token),
output_idx: idx + 1,
- amount,
+ atoms,
})
}
Some(None) => {}
@@ -486,15 +485,15 @@
// Color outputs and also calculate the required input sum
let mut required_input_sum = 0u128;
- for (idx, &amount) in amounts.iter().enumerate() {
- if amount == 0 {
+ for (idx, &atoms) in atoms.iter().enumerate() {
+ if atoms == 0 {
continue;
}
- required_input_sum += u128::from(amount);
+ required_input_sum += u128::from(atoms);
if let Some(output) = self.outputs.get_mut(idx + 1) {
*output = Some(TokenOutput {
token_idx: self.sections.len(),
- variant: TokenVariant::Amount(amount),
+ variant: TokenVariant::Atoms(atoms),
});
}
}
@@ -512,7 +511,7 @@
fn color_burn(
&mut self,
meta: TokenMeta,
- amount: Amount,
+ atoms: Atoms,
) -> Result<(), ColorError> {
for (prev_burn_idx, prev_burn) in
self.intentional_burns.iter().enumerate()
@@ -525,8 +524,7 @@
});
}
}
- self.intentional_burns
- .push(IntentionalBurn { meta, amount });
+ self.intentional_burns.push(IntentionalBurn { meta, atoms });
Ok(())
}
diff --git a/chronik/bitcoinsuite-slp/src/lokad_id.rs b/chronik/bitcoinsuite-slp/src/lokad_id.rs
--- a/chronik/bitcoinsuite-slp/src/lokad_id.rs
+++ b/chronik/bitcoinsuite-slp/src/lokad_id.rs
@@ -256,12 +256,12 @@
outputs: vec![
TxOutput {
script: script(b"\x6a\x046789\x04yyyy"),
- value: 0,
+ sats: 0,
},
// Ignored: OP_RETURN must be first
TxOutput {
script: script(b"\x6a\x04zzzz"),
- value: 0,
+ sats: 0,
},
],
locktime: 0,
@@ -278,7 +278,7 @@
}],
outputs: vec![TxOutput {
script: script(b"\x6a\x50\x046789\x044321"),
- value: 0,
+ sats: 0,
}],
locktime: 0,
})
diff --git a/chronik/bitcoinsuite-slp/src/parsed.rs b/chronik/bitcoinsuite-slp/src/parsed.rs
--- a/chronik/bitcoinsuite-slp/src/parsed.rs
+++ b/chronik/bitcoinsuite-slp/src/parsed.rs
@@ -6,7 +6,7 @@
use std::ops::Range;
-use crate::structs::{Amount, GenesisInfo, TokenMeta, TxType};
+use crate::structs::{Atoms, GenesisInfo, TokenMeta, TxType};
/// Parsed data from SLP or ALP.
/// For SLP, this is from parsing an entire `OP_RETURN`.
@@ -28,9 +28,9 @@
/// Parsed MINT tx with mint data
Mint(ParsedMintData),
/// Parsed SEND tx with send amounts
- Send(Vec<Amount>),
+ Send(Vec<Atoms>),
/// Parsed BURN tx with the burned amount
- Burn(Amount),
+ Burn(Atoms),
/// Parsed unknown token type
Unknown,
}
@@ -47,9 +47,10 @@
/// Mint data of a GENESIS or MINT tx
#[derive(Clone, Debug, Default, Eq, Hash, PartialEq)]
pub struct ParsedMintData {
- /// List of amounts to be minted by this tx, each having their own tx
- /// output
- pub amounts: Vec<Amount>,
+ /// List of amounts (in atoms) to be minted by this tx, each having their
+ /// own tx output. NB this is useful to leave as "amounts" because
+ /// "atoms" is already plural.
+ pub amounts: Vec<Atoms>,
/// Number of mint batons to create, each having their own tx output
pub num_batons: usize,
}
diff --git a/chronik/bitcoinsuite-slp/src/slp/build.rs b/chronik/bitcoinsuite-slp/src/slp/build.rs
--- a/chronik/bitcoinsuite-slp/src/slp/build.rs
+++ b/chronik/bitcoinsuite-slp/src/slp/build.rs
@@ -13,7 +13,7 @@
SLP_LOKAD_ID, TOKEN_TYPE_V1, TOKEN_TYPE_V1_NFT1_CHILD,
TOKEN_TYPE_V1_NFT1_GROUP, TOKEN_TYPE_V2,
},
- structs::{Amount, GenesisInfo},
+ structs::{Atoms, GenesisInfo},
token_id::TokenId,
token_type::SlpTokenType,
};
@@ -35,7 +35,7 @@
genesis_info: &GenesisInfo,
token_type: SlpTokenType,
mint_baton_out_idx: Option<u8>,
- initial_quantity: Amount,
+ initial_quantity: Atoms,
) -> Script {
let mut script = ScriptMut::with_capacity(64);
script.put_opcodes([OP_RETURN]);
@@ -71,7 +71,7 @@
token_id: &TokenId,
token_type: SlpTokenType,
mint_baton_out_idx: Option<u8>,
- additional_quantity: Amount,
+ additional_quantity: Atoms,
) -> Script {
let mut script = ScriptMut::with_capacity(64);
script.put_opcodes([OP_RETURN]);
@@ -90,7 +90,7 @@
/// Build an SLP OP_RETURN MINT script for V2 MintVault
pub fn mint_vault_opreturn(
token_id: &TokenId,
- additional_quantites: impl IntoIterator<Item = Amount>,
+ additional_quantites: impl IntoIterator<Item = Atoms>,
) -> Script {
let mut script = ScriptMut::with_capacity(64);
script.put_opcodes([OP_RETURN]);
@@ -108,7 +108,7 @@
pub fn send_opreturn(
token_id: &TokenId,
token_type: SlpTokenType,
- send_amounts: &[Amount],
+ send_atoms: &[Atoms],
) -> Script {
let mut script = ScriptMut::with_capacity(64);
script.put_opcodes([OP_RETURN]);
@@ -116,8 +116,8 @@
script.put_slp_pushdata(token_type_bytes(token_type));
script.put_slp_pushdata(SEND);
script.put_slp_pushdata(&token_id.to_be_bytes());
- for &amount in send_amounts {
- script.put_slp_pushdata(&amount.to_be_bytes());
+ for &atoms in send_atoms {
+ script.put_slp_pushdata(&atoms.to_be_bytes());
}
script.freeze()
}
@@ -126,7 +126,7 @@
pub fn burn_opreturn(
token_id: &TokenId,
token_type: SlpTokenType,
- burn_amount: Amount,
+ burn_atoms: Atoms,
) -> Script {
let mut script = ScriptMut::with_capacity(1 + 5 + 2 + 5 + 33 + 9);
script.put_opcodes([OP_RETURN]);
@@ -134,6 +134,6 @@
script.put_slp_pushdata(token_type_bytes(token_type));
script.put_slp_pushdata(BURN);
script.put_slp_pushdata(&token_id.to_be_bytes());
- script.put_slp_pushdata(&burn_amount.to_be_bytes());
+ script.put_slp_pushdata(&burn_atoms.to_be_bytes());
script.freeze()
}
diff --git a/chronik/bitcoinsuite-slp/src/slp/burn.rs b/chronik/bitcoinsuite-slp/src/slp/burn.rs
--- a/chronik/bitcoinsuite-slp/src/slp/burn.rs
+++ b/chronik/bitcoinsuite-slp/src/slp/burn.rs
@@ -7,7 +7,7 @@
use crate::{
parsed::{ParsedData, ParsedTxType},
slp::{
- common::{parse_amount, parse_token_id},
+ common::{parse_atoms, parse_token_id},
ParseError,
},
structs::TokenMeta,
@@ -33,7 +33,7 @@
let token_burn_quantity = data_iter.next().unwrap();
let token_burn_quantity =
- parse_amount(&token_burn_quantity, "token_burn_quantity")?;
+ parse_atoms(&token_burn_quantity, "token_burn_quantity")?;
Ok(ParsedData {
meta: TokenMeta {
diff --git a/chronik/bitcoinsuite-slp/src/slp/common.rs b/chronik/bitcoinsuite-slp/src/slp/common.rs
--- a/chronik/bitcoinsuite-slp/src/slp/common.rs
+++ b/chronik/bitcoinsuite-slp/src/slp/common.rs
@@ -2,17 +2,17 @@
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
-use crate::{slp::ParseError, structs::Amount, token_id::TokenId};
+use crate::{slp::ParseError, structs::Atoms, token_id::TokenId};
-pub(crate) fn parse_amount(
- amount_bytes: &[u8],
+pub(crate) fn parse_atoms(
+ atoms_bytes: &[u8],
field_name: &'static str,
-) -> Result<Amount, ParseError> {
- Ok(Amount::from_be_bytes(amount_bytes.try_into().map_err(
+) -> Result<Atoms, ParseError> {
+ Ok(Atoms::from_be_bytes(atoms_bytes.try_into().map_err(
|_| ParseError::InvalidFieldSize {
field_name,
expected: &[8],
- actual: amount_bytes.len(),
+ actual: atoms_bytes.len(),
},
)?))
}
diff --git a/chronik/bitcoinsuite-slp/src/slp/error.rs b/chronik/bitcoinsuite-slp/src/slp/error.rs
--- a/chronik/bitcoinsuite-slp/src/slp/error.rs
+++ b/chronik/bitcoinsuite-slp/src/slp/error.rs
@@ -6,7 +6,7 @@
use bytes::Bytes;
use thiserror::Error;
-use crate::structs::Amount;
+use crate::structs::Atoms;
/// Errors when parsing a SLP tx.
#[derive(Clone, Debug, Error, Eq, PartialEq)]
@@ -95,7 +95,7 @@
#[error(
"Invalid NFT1 Child GENESIS initial quantity, expected 1 but got {0}"
)]
- Nft1ChildInvalidInitialQuantity(Amount),
+ Nft1ChildInvalidInitialQuantity(Atoms),
/// NFT1 Child GENESIS must have 0 decimals
#[error("Invalid NFT1 Child GENESIS decimals, expected 0 but got {0}")]
diff --git a/chronik/bitcoinsuite-slp/src/slp/genesis.rs b/chronik/bitcoinsuite-slp/src/slp/genesis.rs
--- a/chronik/bitcoinsuite-slp/src/slp/genesis.rs
+++ b/chronik/bitcoinsuite-slp/src/slp/genesis.rs
@@ -10,7 +10,7 @@
use crate::{
parsed::{ParsedData, ParsedGenesis, ParsedMintData, ParsedTxType},
- slp::{common::parse_amount, ParseError},
+ slp::{common::parse_atoms, ParseError},
structs::{GenesisInfo, TokenMeta},
token_id::TokenId,
token_type::{SlpTokenType, TokenType},
@@ -60,7 +60,7 @@
});
}
let mint_field = parse_mint_field(token_type, &mint_field)?;
- let initial_quantity = parse_amount(&initial_quantity, "initial_quantity")?;
+ let initial_quantity = parse_atoms(&initial_quantity, "initial_quantity")?;
if decimals[0] > 9 {
return Err(ParseError::InvalidDecimals {
actual: decimals[0] as usize,
diff --git a/chronik/bitcoinsuite-slp/src/slp/mint.rs b/chronik/bitcoinsuite-slp/src/slp/mint.rs
--- a/chronik/bitcoinsuite-slp/src/slp/mint.rs
+++ b/chronik/bitcoinsuite-slp/src/slp/mint.rs
@@ -7,7 +7,7 @@
use crate::{
parsed::{ParsedData, ParsedMintData, ParsedTxType},
slp::{
- common::{parse_amount, parse_token_id},
+ common::{parse_atoms, parse_token_id},
ParseError,
},
structs::TokenMeta,
@@ -49,7 +49,7 @@
let additional_quantity = data_iter.next().unwrap();
assert!(data_iter.next().is_none());
let additional_quantity =
- parse_amount(&additional_quantity, "additional_quantity")?;
+ parse_atoms(&additional_quantity, "additional_quantity")?;
let mut amounts = vec![additional_quantity];
if let Some(mint_baton_out_idx) = mint_baton_out_idx {
diff --git a/chronik/bitcoinsuite-slp/src/slp/mint_vault.rs b/chronik/bitcoinsuite-slp/src/slp/mint_vault.rs
--- a/chronik/bitcoinsuite-slp/src/slp/mint_vault.rs
+++ b/chronik/bitcoinsuite-slp/src/slp/mint_vault.rs
@@ -9,7 +9,7 @@
use crate::{
parsed::{ParsedData, ParsedMintData, ParsedTxType},
slp::{
- common::{parse_amount, parse_token_id},
+ common::{parse_atoms, parse_token_id},
ParseError,
},
structs::TokenMeta,
@@ -65,7 +65,7 @@
let amounts = data_iter
.enumerate()
.map(|(idx, quantity)| {
- parse_amount(&quantity, ADDITIONAL_QUANTITY_FIELD_NAMES[idx])
+ parse_atoms(&quantity, ADDITIONAL_QUANTITY_FIELD_NAMES[idx])
})
.collect::<Result<Vec<_>, _>>()?;
diff --git a/chronik/bitcoinsuite-slp/src/slp/send.rs b/chronik/bitcoinsuite-slp/src/slp/send.rs
--- a/chronik/bitcoinsuite-slp/src/slp/send.rs
+++ b/chronik/bitcoinsuite-slp/src/slp/send.rs
@@ -9,7 +9,7 @@
use crate::{
parsed::{ParsedData, ParsedTxType},
slp::{
- common::{parse_amount, parse_token_id},
+ common::{parse_atoms, parse_token_id},
ParseError,
},
structs::TokenMeta,
@@ -66,7 +66,7 @@
let output_quantities = data_iter
.enumerate()
.map(|(idx, quantity)| {
- parse_amount(&quantity, TOKEN_OUTPUT_QUANTITY_FIELD_NAMES[idx])
+ parse_atoms(&quantity, TOKEN_OUTPUT_QUANTITY_FIELD_NAMES[idx])
})
.collect::<Result<Vec<_>, _>>()?;
diff --git a/chronik/bitcoinsuite-slp/src/structs.rs b/chronik/bitcoinsuite-slp/src/structs.rs
--- a/chronik/bitcoinsuite-slp/src/structs.rs
+++ b/chronik/bitcoinsuite-slp/src/structs.rs
@@ -10,8 +10,8 @@
use crate::{token_id::TokenId, token_type::TokenType};
-/// SLP or ALP amount
-pub type Amount = u64;
+/// SLP or ALP amount in atoms (base tokens)
+pub type Atoms = u64;
/// Common token info identifying tokens, which are essential for verification.
/// A token ID uniquely determines the protocol and token type, and bundling
@@ -50,11 +50,11 @@
UNKNOWN,
}
-/// "Taint" of a UTXO, e.g a token amount or mint baton
+/// "Taint" of a UTXO, e.g. a token amount in atoms or a mint baton
#[derive(Clone, Copy, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
pub enum TokenVariant {
- /// UTXO has a token amount that can be transferred
- Amount(Amount),
+ /// UTXO has an amount in atoms that can be transferred
+ Atoms(Atoms),
/// UTXO can be used to mint new tokens
MintBaton,
/// UTXO has a new unknown token type.
@@ -70,7 +70,8 @@
pub struct TokenOutput {
/// Index of the token metadata in the tx.
pub token_idx: usize,
- /// Amount of the token, or whether it's a mint baton, or an unknown token.
+ /// Amount of the token in base tokens (aka atoms), or whether it's a mint
+ /// baton, or an unknown token.
pub variant: TokenVariant,
}
@@ -81,7 +82,8 @@
pub struct Token {
/// Which token ID etc. this token has.
pub meta: TokenMeta,
- /// Amount of the token, or whether it's a mint baton, or an unknown token.
+ /// Amount of the token in atoms aka base tokens, or whether it's a mint
+ /// baton, or an unknown token.
pub variant: TokenVariant,
}
@@ -117,9 +119,9 @@
impl TokenVariant {
/// Amount associated with the token variant.
- pub fn amount(&self) -> Amount {
+ pub fn atoms(&self) -> Atoms {
match self {
- &TokenVariant::Amount(amount) => amount,
+ &TokenVariant::Atoms(atoms) => atoms,
TokenVariant::MintBaton => 0,
TokenVariant::Unknown(_) => 0,
}
@@ -134,7 +136,7 @@
impl std::fmt::Display for Token {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self.variant {
- TokenVariant::Amount(amount) => write!(f, "{amount}")?,
+ TokenVariant::Atoms(atoms) => write!(f, "{atoms}")?,
TokenVariant::MintBaton => write!(f, "Mint baton")?,
TokenVariant::Unknown(_) => {
return write!(f, "{}", self.meta.token_type)
diff --git a/chronik/bitcoinsuite-slp/src/test_helpers.rs b/chronik/bitcoinsuite-slp/src/test_helpers.rs
--- a/chronik/bitcoinsuite-slp/src/test_helpers.rs
+++ b/chronik/bitcoinsuite-slp/src/test_helpers.rs
@@ -15,7 +15,7 @@
color::ColoredTx,
parsed::{ParsedData, ParsedMintData},
structs::{
- Amount, GenesisInfo, Token, TokenMeta, TokenOutput, TokenVariant,
+ Atoms, GenesisInfo, Token, TokenMeta, TokenOutput, TokenVariant,
},
token_id::TokenId,
token_tx::{TokenTx, TokenTxEntry},
@@ -74,27 +74,27 @@
}
}
-/// Shortcut for a SpentToken amount
-pub fn spent_amount(meta: TokenMeta, amount: u64) -> Option<SpentToken> {
+/// Shortcut for a SpentToken with an amount in atoms
+pub fn spent_atoms(meta: TokenMeta, atoms: u64) -> Option<SpentToken> {
Some(SpentToken {
token: Token {
meta,
- variant: TokenVariant::Amount(amount),
+ variant: TokenVariant::Atoms(atoms),
},
group_token_meta: None,
})
}
-/// Shortcut for a SpentToken amount with a group
-pub fn spent_amount_group(
+/// Shortcut for a SpentToken with an amount in atoms and a group
+pub fn spent_atoms_group(
meta: TokenMeta,
- amount: u64,
+ atoms: u64,
group_token_meta: TokenMeta,
) -> Option<SpentToken> {
Some(SpentToken {
token: Token {
meta,
- variant: TokenVariant::Amount(amount),
+ variant: TokenVariant::Atoms(atoms),
},
group_token_meta: Some(group_token_meta),
})
@@ -111,11 +111,11 @@
})
}
-/// Shortcut for a TokenOutput amount
-pub fn token_amount<const N: usize>(amount: u64) -> Option<TokenOutput> {
+/// Shortcut for a TokenOutput with an amount in atoms
+pub fn token_atoms<const N: usize>(atoms: u64) -> Option<TokenOutput> {
Some(TokenOutput {
token_idx: N,
- variant: TokenVariant::Amount(amount),
+ variant: TokenVariant::Atoms(atoms),
})
}
@@ -147,8 +147,8 @@
tx_type: None,
genesis_info: None,
group_token_meta: None,
- intentional_burn_amount: None,
- actual_burn_amount: 0,
+ intentional_burn_atoms: None,
+ actual_burn_atoms: 0,
is_invalid: false,
burns_mint_batons: false,
burn_error: None,
@@ -160,14 +160,14 @@
/// Shortcut to make an ALP MINT section
pub fn alp_mint<const N: usize>(
token_id: &TokenId,
- amounts: [Amount; N],
+ atoms: [Atoms; N],
num_batons: usize,
) -> Bytes {
mint_section(
token_id,
AlpTokenType::Standard,
&ParsedMintData {
- amounts: amounts.into_iter().collect(),
+ amounts: atoms.into_iter().collect(),
num_batons,
},
)
@@ -182,7 +182,7 @@
TXID,
TxMut {
outputs: [
- [TxOutput { value: 0, script }].as_ref(),
+ [TxOutput { sats: 0, script }].as_ref(),
&vec![TxOutput::default(); N],
]
.concat(),
diff --git a/chronik/bitcoinsuite-slp/src/token_tx.rs b/chronik/bitcoinsuite-slp/src/token_tx.rs
--- a/chronik/bitcoinsuite-slp/src/token_tx.rs
+++ b/chronik/bitcoinsuite-slp/src/token_tx.rs
@@ -6,7 +6,7 @@
use crate::{
color::{FailedColoring, FailedParsing},
- structs::{Amount, GenesisInfo, Token, TokenMeta, TokenOutput, TxType},
+ structs::{Atoms, GenesisInfo, Token, TokenMeta, TokenOutput, TxType},
verify::{BurnError, SpentToken},
};
@@ -51,11 +51,12 @@
/// Whether all input tokens have been burned because of some rule
/// violation. This includes bare burns.
pub is_invalid: bool,
- /// How many tokens of this token were intentionally burned (using a BURN)
- pub intentional_burn_amount: Option<Amount>,
- /// How many tokens were actually burned, independent of the intentional
- /// amount.
- pub actual_burn_amount: u128,
+ /// How many atoms (aka base tokens) of this token were intentionally
+ /// burned (using a BURN)
+ pub intentional_burn_atoms: Option<Atoms>,
+ /// How many atoms (aka base tokens) were actually burned, independent of
+ /// the intentional amount.
+ pub actual_burn_atoms: u128,
/// Whether any mint batons have been burned of this token.
pub burns_mint_batons: bool,
/// Burn message that may have caused the tokens to be burned.
diff --git a/chronik/bitcoinsuite-slp/src/verify.rs b/chronik/bitcoinsuite-slp/src/verify.rs
--- a/chronik/bitcoinsuite-slp/src/verify.rs
+++ b/chronik/bitcoinsuite-slp/src/verify.rs
@@ -50,7 +50,7 @@
InsufficientInputSum {
/// Required minimum inputs as specified in the outputs
required: u128,
- /// Actual supplied token amount
+ /// Actual supplied token amount in atoms (aka base tokens)
actual: u128,
},
}
@@ -80,7 +80,7 @@
}
struct BareBurn {
- burn_amount: u128,
+ burn_atoms: u128,
burns_mint_batons: bool,
group_token_meta: Option<TokenMeta>,
is_invalid: bool,
@@ -107,8 +107,8 @@
genesis_info: None,
group_token_meta: None,
is_invalid: false,
- intentional_burn_amount: Some(intentional_burn.amount),
- actual_burn_amount: 0,
+ intentional_burn_atoms: Some(intentional_burn.atoms),
+ actual_burn_atoms: 0,
burns_mint_batons: false,
burn_error: None,
has_colored_out_of_range: false,
@@ -139,8 +139,8 @@
},
group_token_meta: None,
is_invalid: true,
- intentional_burn_amount: None,
- actual_burn_amount: 0,
+ intentional_burn_atoms: None,
+ actual_burn_atoms: 0,
burns_mint_batons: false,
burn_error: None,
has_colored_out_of_range: false,
@@ -156,7 +156,7 @@
if bare_burn.burns_mint_batons {
entry.is_invalid = true;
}
- entry.actual_burn_amount = bare_burn.burn_amount;
+ entry.actual_burn_atoms = bare_burn.burn_atoms;
entry.burns_mint_batons = bare_burn.burns_mint_batons;
entry.group_token_meta = bare_burn.group_token_meta;
continue;
@@ -167,8 +167,8 @@
genesis_info: None,
group_token_meta: bare_burn.group_token_meta,
is_invalid: bare_burn.is_invalid,
- intentional_burn_amount: None,
- actual_burn_amount: bare_burn.burn_amount,
+ intentional_burn_atoms: None,
+ actual_burn_atoms: bare_burn.burn_atoms,
burns_mint_batons: bare_burn.burns_mint_batons,
burn_error: None,
has_colored_out_of_range: false,
@@ -209,9 +209,9 @@
genesis_info: section.genesis_info.clone(),
group_token_meta: self.inherited_group_token_meta(&section.meta),
is_invalid: false,
- intentional_burn_amount: self
- .intentional_burn_amount(tx, &section.meta),
- actual_burn_amount: 0,
+ intentional_burn_atoms: self
+ .intentional_burn_atoms(tx, &section.meta),
+ actual_burn_atoms: 0,
burns_mint_batons: false,
burn_error: None,
has_colored_out_of_range: section.has_colored_out_of_range,
@@ -224,7 +224,7 @@
{
return TokenTxEntry {
is_invalid: true,
- actual_burn_amount: input_sum,
+ actual_burn_atoms: input_sum,
burns_mint_batons: self.has_mint_baton(&section.meta),
burn_error: Some(TooManyTxInputs(self.spent_tokens.len())),
..entry
@@ -241,7 +241,7 @@
Some(Some(spent_token))
if spent_token.token.meta.token_type
== TokenType::Slp(SlpTokenType::Nft1Group)
- && spent_token.token.variant.amount() > 0 =>
+ && spent_token.token.variant.atoms() > 0 =>
{
TokenTxEntry {
group_token_meta: Some(spent_token.token.meta),
@@ -263,13 +263,13 @@
TxType::MINT if section.is_mint_vault_mint() => {
if self.has_mint_vault() {
return TokenTxEntry {
- actual_burn_amount: input_sum,
+ actual_burn_atoms: input_sum,
..entry
};
}
TokenTxEntry {
is_invalid: true,
- actual_burn_amount: input_sum,
+ actual_burn_atoms: input_sum,
burn_error: Some(MissingMintVault),
..entry
}
@@ -279,13 +279,13 @@
TxType::MINT => {
if self.has_mint_baton(&section.meta) {
return TokenTxEntry {
- actual_burn_amount: input_sum,
+ actual_burn_atoms: input_sum,
..entry
};
}
TokenTxEntry {
is_invalid: true,
- actual_burn_amount: input_sum,
+ actual_burn_atoms: input_sum,
burn_error: Some(MissingMintBaton),
..entry
}
@@ -295,7 +295,7 @@
TxType::SEND if input_sum < section.required_input_sum => {
TokenTxEntry {
is_invalid: true,
- actual_burn_amount: input_sum,
+ actual_burn_atoms: input_sum,
burns_mint_batons: self.has_mint_baton(&section.meta),
burn_error: Some(InsufficientInputSum {
required: section.required_input_sum,
@@ -308,9 +308,9 @@
// Valid SEND
TxType::SEND => {
let output_sum = self.calc_output_sum(tx, &section.meta);
- let actual_burn_amount = input_sum - output_sum;
+ let actual_burn_atoms = input_sum - output_sum;
TokenTxEntry {
- actual_burn_amount,
+ actual_burn_atoms,
burns_mint_batons: self.has_mint_baton(&section.meta),
..entry
}
@@ -359,7 +359,7 @@
.iter()
.flatten()
.filter(|token| &token.token.meta == meta)
- .map(|token| token.token.variant.amount() as u128)
+ .map(|token| token.token.variant.atoms() as u128)
.sum()
}
@@ -368,7 +368,7 @@
.iter()
.flatten()
.filter(|token| &tx.sections[token.token_idx].meta == meta)
- .map(|token| token.variant.amount() as u128)
+ .map(|token| token.variant.atoms() as u128)
.sum()
}
@@ -383,7 +383,7 @@
.and_then(|token| token.group_token_meta)
}
- fn intentional_burn_amount(
+ fn intentional_burn_atoms(
&self,
tx: &ColoredTx,
meta: &TokenMeta,
@@ -391,7 +391,7 @@
tx.intentional_burns
.iter()
.find(|burn| &burn.meta == meta)
- .map(|burn| burn.amount)
+ .map(|burn| burn.atoms)
}
// Bare burns: spent tokens without a corresponding section
@@ -415,7 +415,7 @@
let bare_burn =
bare_burns.entry(&input.token.meta).or_insert(BareBurn {
- burn_amount: 0,
+ burn_atoms: 0,
burns_mint_batons: false,
group_token_meta: input.group_token_meta,
is_invalid: false,
@@ -442,8 +442,8 @@
// All other bare burns are invalid
bare_burn.is_invalid = true;
match input.token.variant {
- TokenVariant::Amount(amount) => {
- bare_burn.burn_amount += u128::from(amount)
+ TokenVariant::Atoms(atoms) => {
+ bare_burn.burn_atoms += u128::from(atoms)
}
TokenVariant::MintBaton => bare_burn.burns_mint_batons = true,
TokenVariant::Unknown(_) => {}
diff --git a/chronik/bitcoinsuite-slp/tests/test_color_alp_all_the_things.rs b/chronik/bitcoinsuite-slp/tests/test_color_alp_all_the_things.rs
--- a/chronik/bitcoinsuite-slp/tests/test_color_alp_all_the_things.rs
+++ b/chronik/bitcoinsuite-slp/tests/test_color_alp_all_the_things.rs
@@ -17,8 +17,7 @@
},
parsed::{ParsedData, ParsedMintData},
structs::{
- Amount, GenesisInfo, Token, TokenMeta, TokenOutput, TokenVariant,
- TxType,
+ Atoms, GenesisInfo, Token, TokenMeta, TokenOutput, TokenVariant, TxType,
},
token_id::TokenId,
token_type::{AlpTokenType, TokenType},
@@ -39,14 +38,14 @@
const EMPTY_TOKEN_ID: TokenId = TokenId::new(EMPTY_TXID);
const STD: AlpTokenType = AlpTokenType::Standard;
-const MAX: Amount = 0xffff_ffff_ffff;
+const MAX: Atoms = 0xffff_ffff_ffff;
fn make_tx<const N: usize>(script: Script) -> Tx {
Tx::with_txid(
TXID,
TxMut {
outputs: [
- [TxOutput { value: 0, script }].as_ref(),
+ [TxOutput { sats: 0, script }].as_ref(),
&vec![TxOutput::default(); N],
]
.concat(),
@@ -69,7 +68,7 @@
fn amount<const TOKENIDX: usize>(amount: u64) -> Option<TokenOutput> {
Some(TokenOutput {
token_idx: TOKENIDX,
- variant: TokenVariant::Amount(amount),
+ variant: TokenVariant::Atoms(amount),
})
}
@@ -88,7 +87,7 @@
}
fn make_genesis<const N: usize>(
- amounts: [Amount; N],
+ amounts: [Atoms; N],
num_batons: usize,
) -> Bytes {
genesis_section(
@@ -103,7 +102,7 @@
fn make_mint<const N: usize>(
token_id: &TokenId,
- amounts: [Amount; N],
+ amounts: [Atoms; N],
num_batons: usize,
) -> Bytes {
mint_section(
@@ -116,14 +115,11 @@
)
}
-fn make_send<const N: usize>(
- token_id: &TokenId,
- amounts: [Amount; N],
-) -> Bytes {
+fn make_send<const N: usize>(token_id: &TokenId, amounts: [Atoms; N]) -> Bytes {
send_section(token_id, AlpTokenType::Standard, amounts)
}
-fn make_burn(token_id: &TokenId, amount: Amount) -> Bytes {
+fn make_burn(token_id: &TokenId, amount: Atoms) -> Bytes {
burn_section(token_id, AlpTokenType::Standard, amount)
}
@@ -219,7 +215,7 @@
],
intentional_burns: vec![IntentionalBurn {
meta: meta(TOKEN_ID2),
- amount: 2,
+ atoms: 2,
}],
outputs: vec![
None,
@@ -264,13 +260,13 @@
FailedColoring {
pushdata_idx: 3,
parsed: parse(make_mint(&TOKEN_ID2, [0, MAX], 0)),
- error: ColorError::OverlappingAmount {
+ error: ColorError::OverlappingAtoms {
prev_token: Token {
meta: meta(TOKEN_ID),
- variant: TokenVariant::Amount(7),
+ variant: TokenVariant::Atoms(7),
},
output_idx: 2,
- amount: MAX,
+ atoms: MAX,
},
},
// fail MINT: Overlapping batons
@@ -280,7 +276,7 @@
error: ColorError::OverlappingMintBaton {
prev_token: Token {
meta: meta(TOKEN_ID),
- variant: TokenVariant::Amount(7),
+ variant: TokenVariant::Atoms(7),
},
output_idx: 2,
},
diff --git a/chronik/bitcoinsuite-slp/tests/test_color_alp_burn.rs b/chronik/bitcoinsuite-slp/tests/test_color_alp_burn.rs
--- a/chronik/bitcoinsuite-slp/tests/test_color_alp_burn.rs
+++ b/chronik/bitcoinsuite-slp/tests/test_color_alp_burn.rs
@@ -10,7 +10,7 @@
alp::{burn_section, parse_section, sections_opreturn},
color::{ColorError, ColoredTx, FailedColoring, IntentionalBurn},
parsed::ParsedData,
- structs::{Amount, TokenMeta},
+ structs::{Atoms, TokenMeta},
token_id::TokenId,
token_type::{AlpTokenType, TokenType},
};
@@ -24,14 +24,14 @@
const TOKEN_ID3: TokenId = TokenId::new(TXID3);
const STD: AlpTokenType = AlpTokenType::Standard;
-const MAX: Amount = 0xffff_ffff_ffff;
+const MAX: Atoms = 0xffff_ffff_ffff;
fn make_tx<const N: usize>(script: Script) -> Tx {
Tx::with_txid(
TXID,
TxMut {
outputs: [
- [TxOutput { value: 0, script }].as_ref(),
+ [TxOutput { sats: 0, script }].as_ref(),
&vec![TxOutput::default(); N],
]
.concat(),
@@ -51,8 +51,8 @@
parse_section(&TXID, pushdata).unwrap().unwrap()
}
-fn make_burn(token_id: &TokenId, amount: Amount) -> Bytes {
- burn_section(token_id, AlpTokenType::Standard, amount)
+fn make_burn(token_id: &TokenId, atoms: Atoms) -> Bytes {
+ burn_section(token_id, AlpTokenType::Standard, atoms)
}
#[test]
@@ -65,7 +65,7 @@
Some(ColoredTx {
intentional_burns: vec![IntentionalBurn {
meta: meta(TOKEN_ID2),
- amount: 3,
+ atoms: 3,
}],
outputs: vec![None],
failed_colorings: vec![FailedColoring {
@@ -91,7 +91,7 @@
Some(ColoredTx {
intentional_burns: vec![IntentionalBurn {
meta: meta(TOKEN_ID2),
- amount: 3,
+ atoms: 3,
}],
outputs: vec![None],
..Default::default()
@@ -110,11 +110,11 @@
intentional_burns: vec![
IntentionalBurn {
meta: meta(TOKEN_ID2),
- amount: 1,
+ atoms: 1,
},
IntentionalBurn {
meta: meta(TOKEN_ID3),
- amount: MAX,
+ atoms: MAX,
},
],
outputs: vec![None],
diff --git a/chronik/bitcoinsuite-slp/tests/test_color_alp_genesis.rs b/chronik/bitcoinsuite-slp/tests/test_color_alp_genesis.rs
--- a/chronik/bitcoinsuite-slp/tests/test_color_alp_genesis.rs
+++ b/chronik/bitcoinsuite-slp/tests/test_color_alp_genesis.rs
@@ -14,7 +14,7 @@
color::{ColorError, ColoredTx, ColoredTxSection, FailedColoring},
parsed::{ParsedData, ParsedGenesis, ParsedMintData, ParsedTxType},
structs::{
- Amount, GenesisInfo, TokenMeta, TokenOutput, TokenVariant, TxType,
+ Atoms, GenesisInfo, TokenMeta, TokenOutput, TokenVariant, TxType,
},
token_id::TokenId,
token_type::{AlpTokenType, TokenType},
@@ -30,7 +30,7 @@
static INFO: GenesisInfo = GenesisInfo::empty_alp();
-const MAX: Amount = 0xffff_ffff_ffff;
+const MAX: Atoms = 0xffff_ffff_ffff;
const MINT_BATON: Option<TokenOutput> = Some(TokenOutput {
token_idx: 0,
@@ -42,7 +42,7 @@
TXID,
TxMut {
outputs: [
- [TxOutput { value: 0, script }].as_ref(),
+ [TxOutput { sats: 0, script }].as_ref(),
&vec![TxOutput::default(); N],
]
.concat(),
@@ -65,7 +65,7 @@
fn amount(amount: u64) -> Option<TokenOutput> {
Some(TokenOutput {
token_idx: 0,
- variant: TokenVariant::Amount(amount),
+ variant: TokenVariant::Atoms(amount),
})
}
diff --git a/chronik/bitcoinsuite-slp/tests/test_color_alp_mint.rs b/chronik/bitcoinsuite-slp/tests/test_color_alp_mint.rs
--- a/chronik/bitcoinsuite-slp/tests/test_color_alp_mint.rs
+++ b/chronik/bitcoinsuite-slp/tests/test_color_alp_mint.rs
@@ -10,7 +10,7 @@
alp::{mint_section, parse_section, sections_opreturn},
color::{ColorError, ColoredTx, ColoredTxSection, FailedColoring},
parsed::{ParsedData, ParsedMintData},
- structs::{Amount, Token, TokenMeta, TokenOutput, TokenVariant, TxType},
+ structs::{Atoms, Token, TokenMeta, TokenOutput, TokenVariant, TxType},
token_id::TokenId,
token_type::{AlpTokenType, TokenType},
};
@@ -29,14 +29,14 @@
const EMPTY_TOKEN_ID: TokenId = TokenId::new(EMPTY_TXID);
const STD: AlpTokenType = AlpTokenType::Standard;
-const MAX: Amount = 0xffff_ffff_ffff;
+const MAX: Atoms = 0xffff_ffff_ffff;
fn make_tx<const N: usize>(script: Script) -> Tx {
Tx::with_txid(
TXID,
TxMut {
outputs: [
- [TxOutput { value: 0, script }].as_ref(),
+ [TxOutput { sats: 0, script }].as_ref(),
&vec![TxOutput::default(); N],
]
.concat(),
@@ -69,7 +69,7 @@
fn amount<const TOKENIDX: usize>(amount: u64) -> Option<TokenOutput> {
Some(TokenOutput {
token_idx: TOKENIDX,
- variant: TokenVariant::Amount(amount),
+ variant: TokenVariant::Atoms(amount),
})
}
@@ -89,7 +89,7 @@
fn make_mint<const N: usize>(
token_id: &TokenId,
- amounts: [Amount; N],
+ amounts: [Atoms; N],
num_batons: usize,
) -> Bytes {
mint_section(
@@ -240,13 +240,13 @@
failed_colorings: vec![FailedColoring {
pushdata_idx: 1,
parsed: parse(make_mint(&TOKEN_ID3, [0, 777, 1], 0)),
- error: ColorError::OverlappingAmount {
+ error: ColorError::OverlappingAtoms {
prev_token: Token {
meta: meta(TOKEN_ID2),
variant: TokenVariant::MintBaton,
},
output_idx: 2,
- amount: 777,
+ atoms: 777,
},
}],
..Default::default()
@@ -270,7 +270,7 @@
error: ColorError::OverlappingMintBaton {
prev_token: Token {
meta: meta(TOKEN_ID2),
- variant: TokenVariant::Amount(9),
+ variant: TokenVariant::Atoms(9),
},
output_idx: 3,
},
diff --git a/chronik/bitcoinsuite-slp/tests/test_color_alp_send.rs b/chronik/bitcoinsuite-slp/tests/test_color_alp_send.rs
--- a/chronik/bitcoinsuite-slp/tests/test_color_alp_send.rs
+++ b/chronik/bitcoinsuite-slp/tests/test_color_alp_send.rs
@@ -10,7 +10,7 @@
alp::{parse_section, sections_opreturn, send_section},
color::{ColorError, ColoredTx, ColoredTxSection, FailedColoring},
parsed::ParsedData,
- structs::{Amount, Token, TokenMeta, TokenOutput, TokenVariant, TxType},
+ structs::{Atoms, Token, TokenMeta, TokenOutput, TokenVariant, TxType},
token_id::TokenId,
token_type::{AlpTokenType, TokenType},
};
@@ -27,14 +27,14 @@
const STD: AlpTokenType = AlpTokenType::Standard;
-const MAX: Amount = 0xffff_ffff_ffff;
+const MAX: Atoms = 0xffff_ffff_ffff;
fn make_tx<const N: usize>(script: Script) -> Tx {
Tx::with_txid(
TXID,
TxMut {
outputs: [
- [TxOutput { value: 0, script }].as_ref(),
+ [TxOutput { sats: 0, script }].as_ref(),
&vec![TxOutput::default(); N],
]
.concat(),
@@ -70,14 +70,11 @@
fn amount<const TOKENIDX: usize>(amount: u64) -> Option<TokenOutput> {
Some(TokenOutput {
token_idx: TOKENIDX,
- variant: TokenVariant::Amount(amount),
+ variant: TokenVariant::Atoms(amount),
})
}
-fn make_send<const N: usize>(
- token_id: &TokenId,
- amounts: [Amount; N],
-) -> Bytes {
+fn make_send<const N: usize>(token_id: &TokenId, amounts: [Atoms; N]) -> Bytes {
send_section(token_id, AlpTokenType::Standard, amounts)
}
@@ -140,13 +137,13 @@
failed_colorings: vec![FailedColoring {
pushdata_idx: 1,
parsed: parse(make_send(&TOKEN_ID3, [3, 7, MAX])),
- error: ColorError::OverlappingAmount {
+ error: ColorError::OverlappingAtoms {
prev_token: Token {
meta: meta(TOKEN_ID2),
- variant: TokenVariant::Amount(2),
+ variant: TokenVariant::Atoms(2),
},
output_idx: 2,
- amount: 7,
+ atoms: 7,
},
}],
..Default::default()
diff --git a/chronik/bitcoinsuite-slp/tests/test_color_common.rs b/chronik/bitcoinsuite-slp/tests/test_color_common.rs
--- a/chronik/bitcoinsuite-slp/tests/test_color_common.rs
+++ b/chronik/bitcoinsuite-slp/tests/test_color_common.rs
@@ -21,7 +21,7 @@
Tx::with_txid(
TxId::new([4; 32]),
TxMut {
- outputs: vec![TxOutput { value: 0, script }],
+ outputs: vec![TxOutput { sats: 0, script }],
..Default::default()
},
)
diff --git a/chronik/bitcoinsuite-slp/tests/test_color_slp.rs b/chronik/bitcoinsuite-slp/tests/test_color_slp.rs
--- a/chronik/bitcoinsuite-slp/tests/test_color_slp.rs
+++ b/chronik/bitcoinsuite-slp/tests/test_color_slp.rs
@@ -31,7 +31,7 @@
TXID,
TxMut {
outputs: [
- [TxOutput { value: 0, script }].as_ref(),
+ [TxOutput { sats: 0, script }].as_ref(),
&vec![TxOutput::default(); n_extra_outputs],
]
.concat(),
@@ -47,10 +47,10 @@
}
}
-fn amount(amount: u64) -> Option<TokenOutput> {
+fn atoms(atoms: u64) -> Option<TokenOutput> {
Some(TokenOutput {
token_idx: 0,
- variant: TokenVariant::Amount(amount),
+ variant: TokenVariant::Atoms(atoms),
})
}
@@ -112,7 +112,7 @@
4,
)),
Some(ColoredTx {
- outputs: vec![None, amount(amt), None, None, None],
+ outputs: vec![None, atoms(amt), None, None, None],
sections: vec![ColoredTxSection {
meta: meta(TOKEN_ID, token_type),
tx_type: TxType::GENESIS,
@@ -131,7 +131,7 @@
for token_type in [SlpTokenType::Fungible, SlpTokenType::Nft1Group] {
for out_idx in 2..=255 {
let mut outputs = vec![None; out_idx + 1];
- outputs[1] = amount(1234567);
+ outputs[1] = atoms(1234567);
outputs[out_idx] = MINT_BATON;
assert_eq!(
ColoredTx::color_tx(&make_tx(
@@ -211,7 +211,7 @@
1,
)),
Some(ColoredTx {
- outputs: vec![None, amount(7777)],
+ outputs: vec![None, atoms(7777)],
sections: vec![ColoredTxSection {
has_colored_out_of_range: true,
..section.clone()
@@ -236,7 +236,7 @@
4,
)),
Some(ColoredTx {
- outputs: vec![None, amount(amt), None, None, None],
+ outputs: vec![None, atoms(amt), None, None, None],
sections: vec![ColoredTxSection {
meta: meta(TOKEN_ID1, token_type),
tx_type: TxType::MINT,
@@ -257,7 +257,7 @@
// Zero amounts are colored as None
let outputs = [None, None]
.into_iter()
- .chain((1..num_amounts).map(amount))
+ .chain((1..num_amounts).map(atoms))
.collect::<Vec<_>>();
assert_eq!(
ColoredTx::color_tx(&make_tx(
@@ -331,7 +331,7 @@
1,
)),
Some(ColoredTx {
- outputs: vec![None, amount(7777)],
+ outputs: vec![None, atoms(7777)],
sections: vec![ColoredTxSection {
has_colored_out_of_range: true,
..section.clone()
@@ -402,7 +402,7 @@
2,
)),
Some(ColoredTx {
- outputs: vec![None, amount(4444), None],
+ outputs: vec![None, atoms(4444), None],
sections: vec![ColoredTxSection {
meta: meta(TOKEN_ID1, token_type),
tx_type: TxType::SEND,
@@ -421,7 +421,7 @@
// Zero amounts are colored as None
let outputs = [None, None]
.into_iter()
- .chain((1..num_amounts).map(amount))
+ .chain((1..num_amounts).map(atoms))
.collect::<Vec<_>>();
assert_eq!(
ColoredTx::color_tx(&make_tx(
@@ -517,7 +517,7 @@
sections: vec![],
intentional_burns: vec![IntentionalBurn {
meta: meta(TOKEN_ID1, token_type),
- amount: 3333,
+ atoms: 3333,
}],
..Default::default()
}),
diff --git a/chronik/bitcoinsuite-slp/tests/test_slp_parse_genesis.rs b/chronik/bitcoinsuite-slp/tests/test_slp_parse_genesis.rs
--- a/chronik/bitcoinsuite-slp/tests/test_slp_parse_genesis.rs
+++ b/chronik/bitcoinsuite-slp/tests/test_slp_parse_genesis.rs
@@ -6,7 +6,7 @@
use bitcoinsuite_slp::{
parsed::{ParsedData, ParsedGenesis, ParsedMintData, ParsedTxType},
slp::{genesis_opreturn, parse, ParseError},
- structs::{Amount, GenesisInfo, TokenMeta},
+ structs::{Atoms, GenesisInfo, TokenMeta},
token_id::TokenId,
token_type::{SlpTokenType, TokenType},
};
@@ -353,11 +353,11 @@
mint_data: match token_type {
SlpTokenType::MintVault | SlpTokenType::Nft1Child =>
ParsedMintData {
- amounts: vec![qty as Amount],
+ amounts: vec![qty as Atoms],
num_batons: 0,
},
_ => ParsedMintData {
- amounts: vec![qty as Amount, 0, 0],
+ amounts: vec![qty as Atoms, 0, 0],
num_batons: 1,
},
},
diff --git a/chronik/bitcoinsuite-slp/tests/test_verify_alp_all_the_things.rs b/chronik/bitcoinsuite-slp/tests/test_verify_alp_all_the_things.rs
--- a/chronik/bitcoinsuite-slp/tests/test_verify_alp_all_the_things.rs
+++ b/chronik/bitcoinsuite-slp/tests/test_verify_alp_all_the_things.rs
@@ -11,7 +11,7 @@
structs::{GenesisInfo, TxType},
test_helpers::{
alp_mint, empty_entry, meta_alp as meta, meta_alp_unknown, parse_alp,
- spent_amount, spent_baton, token_amount, token_baton, token_unknown,
+ spent_atoms, spent_baton, token_atoms, token_baton, token_unknown,
verify, EMPTY_TOKEN_ID, TOKEN_ID1, TOKEN_ID2, TOKEN_ID3, TOKEN_ID4,
},
token_tx::{TokenTx, TokenTxEntry},
@@ -71,8 +71,8 @@
&[
spent_baton(meta(TOKEN_ID3)),
spent_baton(meta(TOKEN_ID2)),
- spent_amount(meta(TOKEN_ID4), 0xffff_ffff_ffff - 2),
- spent_amount(meta(TOKEN_ID4), 7),
+ spent_atoms(meta(TOKEN_ID4), 0xffff_ffff_ffff - 2),
+ spent_atoms(meta(TOKEN_ID4), 7),
],
),
TokenTx {
@@ -100,7 +100,7 @@
TokenTxEntry {
meta: meta(TOKEN_ID2),
tx_type: Some(TxType::MINT),
- intentional_burn_amount: Some(2),
+ intentional_burn_atoms: Some(2),
failed_colorings: vec![
FailedColoring {
pushdata_idx: 2,
@@ -146,7 +146,7 @@
TokenTxEntry {
meta: meta(TOKEN_ID4),
tx_type: Some(TxType::SEND),
- actual_burn_amount: 5,
+ actual_burn_atoms: 5,
failed_colorings: vec![FailedColoring {
pushdata_idx: 9,
parsed: parse_alp(alp_mint(&TOKEN_ID4, [], 0)),
@@ -170,16 +170,16 @@
],
outputs: vec![
None,
- token_amount::<1>(3),
- token_amount::<0>(7),
+ token_atoms::<1>(3),
+ token_atoms::<0>(7),
token_baton::<1>(),
- token_amount::<2>(2),
- token_amount::<0>(1),
+ token_atoms::<2>(2),
+ token_atoms::<0>(1),
token_baton::<0>(),
token_baton::<0>(),
token_baton::<2>(),
token_unknown::<4>(0x89),
- token_amount::<3>(0xffff_ffff_ffff),
+ token_atoms::<3>(0xffff_ffff_ffff),
],
failed_parsings: vec![FailedParsing {
pushdata_idx: Some(12),
diff --git a/chronik/bitcoinsuite-slp/tests/test_verify_alp_burn.rs b/chronik/bitcoinsuite-slp/tests/test_verify_alp_burn.rs
--- a/chronik/bitcoinsuite-slp/tests/test_verify_alp_burn.rs
+++ b/chronik/bitcoinsuite-slp/tests/test_verify_alp_burn.rs
@@ -6,7 +6,7 @@
alp::{burn_section, sections_opreturn, send_section},
structs::TxType,
test_helpers::{
- empty_entry, meta_alp as meta, spent_amount, spent_baton, token_amount,
+ empty_entry, meta_alp as meta, spent_atoms, spent_baton, token_atoms,
verify, TOKEN_ID2, TOKEN_ID3, TOKEN_ID4, TOKEN_ID5,
},
token_tx::{TokenTx, TokenTxEntry},
@@ -26,7 +26,7 @@
entries: vec![TokenTxEntry {
meta: meta(TOKEN_ID2),
tx_type: Some(TxType::BURN),
- intentional_burn_amount: Some(400),
+ intentional_burn_atoms: Some(400),
..empty_entry()
}],
outputs: vec![None],
@@ -49,7 +49,7 @@
entries: vec![TokenTxEntry {
meta: meta(TOKEN_ID2),
tx_type: Some(TxType::SEND),
- intentional_burn_amount: Some(400),
+ intentional_burn_atoms: Some(400),
is_invalid: true,
burn_error: Some(BurnError::InsufficientInputSum {
required: 200,
@@ -77,7 +77,7 @@
entries: vec![TokenTxEntry {
meta: meta(TOKEN_ID2),
tx_type: Some(TxType::SEND),
- intentional_burn_amount: Some(400),
+ intentional_burn_atoms: Some(400),
is_invalid: true,
burn_error: Some(BurnError::InsufficientInputSum {
required: 200,
@@ -99,17 +99,17 @@
send_section(&TOKEN_ID2, Standard, [80, 20]),
burn_section(&TOKEN_ID2, Standard, 400),
]),
- &[spent_amount(meta(TOKEN_ID2), 500)],
+ &[spent_atoms(meta(TOKEN_ID2), 500)],
),
TokenTx {
entries: vec![TokenTxEntry {
meta: meta(TOKEN_ID2),
tx_type: Some(TxType::SEND),
- actual_burn_amount: 400,
- intentional_burn_amount: Some(400),
+ actual_burn_atoms: 400,
+ intentional_burn_atoms: Some(400),
..empty_entry()
}],
- outputs: vec![None, token_amount::<0>(80), token_amount::<0>(20)],
+ outputs: vec![None, token_atoms::<0>(80), token_atoms::<0>(20)],
failed_parsings: vec![],
},
);
@@ -127,11 +127,11 @@
burn_section(&TOKEN_ID4, Standard, 3000),
]),
&[
- spent_amount(meta(TOKEN_ID2), 500),
- spent_amount(meta(TOKEN_ID3), 7000),
- spent_amount(meta(TOKEN_ID4), 2000),
+ spent_atoms(meta(TOKEN_ID2), 500),
+ spent_atoms(meta(TOKEN_ID3), 7000),
+ spent_atoms(meta(TOKEN_ID4), 2000),
spent_baton(meta(TOKEN_ID4)),
- spent_amount(meta(TOKEN_ID5), 500),
+ spent_atoms(meta(TOKEN_ID5), 500),
],
),
TokenTx {
@@ -139,39 +139,39 @@
TokenTxEntry {
meta: meta(TOKEN_ID2),
tx_type: Some(TxType::SEND),
- actual_burn_amount: 400,
- intentional_burn_amount: Some(400),
+ actual_burn_atoms: 400,
+ intentional_burn_atoms: Some(400),
..empty_entry()
},
TokenTxEntry {
meta: meta(TOKEN_ID3),
tx_type: Some(TxType::SEND),
- actual_burn_amount: 1000,
- intentional_burn_amount: Some(1000),
+ actual_burn_atoms: 1000,
+ intentional_burn_atoms: Some(1000),
..empty_entry()
},
TokenTxEntry {
meta: meta(TOKEN_ID4),
tx_type: Some(TxType::BURN),
- actual_burn_amount: 2000,
- intentional_burn_amount: Some(3000),
+ actual_burn_atoms: 2000,
+ intentional_burn_atoms: Some(3000),
is_invalid: true,
burns_mint_batons: true,
..empty_entry()
},
TokenTxEntry {
meta: meta(TOKEN_ID5),
- actual_burn_amount: 500,
+ actual_burn_atoms: 500,
is_invalid: true,
..empty_entry()
},
],
outputs: vec![
None,
- token_amount::<0>(80),
- token_amount::<0>(20),
- token_amount::<1>(5000),
- token_amount::<1>(1000),
+ token_atoms::<0>(80),
+ token_atoms::<0>(20),
+ token_atoms::<1>(5000),
+ token_atoms::<1>(1000),
],
failed_parsings: vec![],
},
@@ -188,9 +188,9 @@
send_section(&TOKEN_ID4, Standard, [0, 0, 0, 0, 500]),
]),
&[
- spent_amount(meta(TOKEN_ID2), 150),
- spent_amount(meta(TOKEN_ID3), 14000),
- spent_amount(meta(TOKEN_ID5), 700),
+ spent_atoms(meta(TOKEN_ID2), 150),
+ spent_atoms(meta(TOKEN_ID3), 14000),
+ spent_atoms(meta(TOKEN_ID5), 700),
],
),
TokenTx {
@@ -198,13 +198,13 @@
TokenTxEntry {
meta: meta(TOKEN_ID2),
tx_type: Some(TxType::SEND),
- actual_burn_amount: 50,
+ actual_burn_atoms: 50,
..empty_entry()
},
TokenTxEntry {
meta: meta(TOKEN_ID3),
tx_type: Some(TxType::SEND),
- actual_burn_amount: 4000,
+ actual_burn_atoms: 4000,
..empty_entry()
},
TokenTxEntry {
@@ -220,16 +220,16 @@
TokenTxEntry {
meta: meta(TOKEN_ID5),
is_invalid: true,
- actual_burn_amount: 700,
+ actual_burn_atoms: 700,
..empty_entry()
},
],
outputs: vec![
None,
- token_amount::<0>(80),
- token_amount::<1>(8000),
- token_amount::<0>(20),
- token_amount::<1>(2000),
+ token_atoms::<0>(80),
+ token_atoms::<1>(8000),
+ token_atoms::<0>(20),
+ token_atoms::<1>(2000),
None,
],
failed_parsings: vec![],
diff --git a/chronik/bitcoinsuite-slp/tests/test_verify_alp_common.rs b/chronik/bitcoinsuite-slp/tests/test_verify_alp_common.rs
--- a/chronik/bitcoinsuite-slp/tests/test_verify_alp_common.rs
+++ b/chronik/bitcoinsuite-slp/tests/test_verify_alp_common.rs
@@ -9,8 +9,8 @@
empp,
structs::TxType,
test_helpers::{
- alp_mint, empty_entry, meta_alp as meta, parse_alp, spent_amount,
- token_amount, verify, TOKEN_ID2,
+ alp_mint, empty_entry, meta_alp as meta, parse_alp, spent_atoms,
+ token_atoms, verify, TOKEN_ID2,
},
token_tx::{TokenTx, TokenTxEntry},
token_type::AlpTokenType::*,
@@ -84,7 +84,7 @@
fn test_verify_alp_more_than_32767_inputs_invalid() {
// More than 32767 inputs disallowed in ALP
let spent_tokens = vec![
- spent_amount(meta(TOKEN_ID2), 0xffff_ffff_ffff);
+ spent_atoms(meta(TOKEN_ID2), 0xffff_ffff_ffff);
alp::consts::MAX_TX_INPUTS + 1
];
assert_eq!(
@@ -100,7 +100,7 @@
entries: vec![TokenTxEntry {
meta: meta(TOKEN_ID2),
tx_type: Some(TxType::SEND),
- actual_burn_amount: 32768 * 0xffff_ffff_ffff,
+ actual_burn_atoms: 32768 * 0xffff_ffff_ffff,
is_invalid: true,
burn_error: Some(BurnError::TooManyTxInputs(32768)),
..empty_entry()
@@ -115,7 +115,7 @@
fn test_verify_alp_32767_inputs_valid() {
// 32767 inputs allowed in ALP
let spent_tokens = vec![
- spent_amount(meta(TOKEN_ID2), 0xffff_ffff_ffff);
+ spent_atoms(meta(TOKEN_ID2), 0xffff_ffff_ffff);
alp::consts::MAX_TX_INPUTS
];
assert_eq!(
@@ -131,10 +131,10 @@
entries: vec![TokenTxEntry {
meta: meta(TOKEN_ID2),
tx_type: Some(TxType::SEND),
- actual_burn_amount: 32766 * 0xffff_ffff_ffff,
+ actual_burn_atoms: 32766 * 0xffff_ffff_ffff,
..empty_entry()
}],
- outputs: vec![None, token_amount::<0>(0xffff_ffff_ffff)],
+ outputs: vec![None, token_atoms::<0>(0xffff_ffff_ffff)],
failed_parsings: vec![],
},
);
diff --git a/chronik/bitcoinsuite-slp/tests/test_verify_alp_genesis.rs b/chronik/bitcoinsuite-slp/tests/test_verify_alp_genesis.rs
--- a/chronik/bitcoinsuite-slp/tests/test_verify_alp_genesis.rs
+++ b/chronik/bitcoinsuite-slp/tests/test_verify_alp_genesis.rs
@@ -7,7 +7,7 @@
parsed::ParsedMintData,
structs::{GenesisInfo, TxType},
test_helpers::{
- empty_entry, meta_alp as meta, spent_amount, token_amount, token_baton,
+ empty_entry, meta_alp as meta, spent_atoms, token_atoms, token_baton,
verify, TOKEN_ID1, TOKEN_ID2, TOKEN_ID3,
},
token_tx::{TokenTx, TokenTxEntry},
@@ -48,9 +48,9 @@
}],
outputs: vec![
None,
- token_amount::<0>(200),
+ token_atoms::<0>(200),
None,
- token_amount::<0>(300),
+ token_atoms::<0>(300),
token_baton::<0>(),
token_baton::<0>(),
],
@@ -72,8 +72,8 @@
},
)]),
&[
- spent_amount(meta(TOKEN_ID2), 100),
- spent_amount(meta(TOKEN_ID3), 2000),
+ spent_atoms(meta(TOKEN_ID2), 100),
+ spent_atoms(meta(TOKEN_ID3), 2000),
],
),
TokenTx {
@@ -87,21 +87,21 @@
TokenTxEntry {
meta: meta(TOKEN_ID2),
is_invalid: true,
- actual_burn_amount: 100,
+ actual_burn_atoms: 100,
..empty_entry()
},
TokenTxEntry {
meta: meta(TOKEN_ID3),
is_invalid: true,
- actual_burn_amount: 2000,
+ actual_burn_atoms: 2000,
..empty_entry()
},
],
outputs: vec![
None,
- token_amount::<0>(200),
+ token_atoms::<0>(200),
None,
- token_amount::<0>(300),
+ token_atoms::<0>(300),
token_baton::<0>(),
token_baton::<0>(),
],
diff --git a/chronik/bitcoinsuite-slp/tests/test_verify_alp_mint.rs b/chronik/bitcoinsuite-slp/tests/test_verify_alp_mint.rs
--- a/chronik/bitcoinsuite-slp/tests/test_verify_alp_mint.rs
+++ b/chronik/bitcoinsuite-slp/tests/test_verify_alp_mint.rs
@@ -6,7 +6,7 @@
alp::sections_opreturn,
structs::TxType,
test_helpers::{
- alp_mint, empty_entry, meta_alp as meta, spent_baton, token_amount,
+ alp_mint, empty_entry, meta_alp as meta, spent_baton, token_atoms,
token_baton, verify, TOKEN_ID2, TOKEN_ID3, TOKEN_ID4, TOKEN_ID5,
},
token_tx::{TokenTx, TokenTxEntry},
@@ -78,7 +78,7 @@
tx_type: Some(TxType::MINT),
..empty_entry()
}],
- outputs: vec![None, token_amount::<0>(100), token_baton::<0>()],
+ outputs: vec![None, token_atoms::<0>(100), token_baton::<0>()],
failed_parsings: vec![],
},
);
@@ -128,8 +128,8 @@
],
outputs: vec![
None,
- token_amount::<0>(100),
- token_amount::<1>(15),
+ token_atoms::<0>(100),
+ token_atoms::<1>(15),
token_baton::<0>(),
token_baton::<1>(),
None,
diff --git a/chronik/bitcoinsuite-slp/tests/test_verify_alp_send.rs b/chronik/bitcoinsuite-slp/tests/test_verify_alp_send.rs
--- a/chronik/bitcoinsuite-slp/tests/test_verify_alp_send.rs
+++ b/chronik/bitcoinsuite-slp/tests/test_verify_alp_send.rs
@@ -6,7 +6,7 @@
alp::{sections_opreturn, send_section},
structs::TxType,
test_helpers::{
- empty_entry, meta_alp as meta, spent_amount, token_amount, verify,
+ empty_entry, meta_alp as meta, spent_atoms, token_atoms, verify,
TOKEN_ID2, TOKEN_ID3, TOKEN_ID4, TOKEN_ID5,
},
token_tx::{TokenTx, TokenTxEntry},
@@ -52,13 +52,13 @@
Standard,
[80, 20],
)]),
- &[spent_amount(meta(TOKEN_ID2), 90)],
+ &[spent_atoms(meta(TOKEN_ID2), 90)],
),
TokenTx {
entries: vec![TokenTxEntry {
meta: meta(TOKEN_ID2),
tx_type: Some(TxType::SEND),
- actual_burn_amount: 90,
+ actual_burn_atoms: 90,
is_invalid: true,
burn_error: Some(BurnError::InsufficientInputSum {
required: 100,
@@ -81,7 +81,7 @@
Standard,
[80, 20],
)]),
- &[spent_amount(meta(TOKEN_ID3), 100)],
+ &[spent_atoms(meta(TOKEN_ID3), 100)],
),
TokenTx {
entries: vec![
@@ -99,7 +99,7 @@
meta: meta(TOKEN_ID3),
tx_type: None,
is_invalid: true,
- actual_burn_amount: 100,
+ actual_burn_atoms: 100,
..empty_entry()
}
],
@@ -118,7 +118,7 @@
Standard,
[80, 20],
)]),
- &[spent_amount(meta(TOKEN_ID2), 100)],
+ &[spent_atoms(meta(TOKEN_ID2), 100)],
),
TokenTx {
entries: vec![TokenTxEntry {
@@ -126,7 +126,7 @@
tx_type: Some(TxType::SEND),
..empty_entry()
}],
- outputs: vec![None, token_amount::<0>(80), token_amount::<0>(20)],
+ outputs: vec![None, token_atoms::<0>(80), token_atoms::<0>(20)],
failed_parsings: vec![],
},
);
@@ -163,9 +163,9 @@
send_section(&TOKEN_ID4, Standard, [0, 0, 0, 0, 500]),
]),
&[
- spent_amount(meta(TOKEN_ID2), 150),
- spent_amount(meta(TOKEN_ID3), 14000),
- spent_amount(meta(TOKEN_ID5), 700),
+ spent_atoms(meta(TOKEN_ID2), 150),
+ spent_atoms(meta(TOKEN_ID3), 14000),
+ spent_atoms(meta(TOKEN_ID5), 700),
],
),
TokenTx {
@@ -173,13 +173,13 @@
TokenTxEntry {
meta: meta(TOKEN_ID2),
tx_type: Some(TxType::SEND),
- actual_burn_amount: 50,
+ actual_burn_atoms: 50,
..empty_entry()
},
TokenTxEntry {
meta: meta(TOKEN_ID3),
tx_type: Some(TxType::SEND),
- actual_burn_amount: 4000,
+ actual_burn_atoms: 4000,
..empty_entry()
},
TokenTxEntry {
@@ -195,16 +195,16 @@
TokenTxEntry {
meta: meta(TOKEN_ID5),
is_invalid: true,
- actual_burn_amount: 700,
+ actual_burn_atoms: 700,
..empty_entry()
},
],
outputs: vec![
None,
- token_amount::<0>(80),
- token_amount::<1>(8000),
- token_amount::<0>(20),
- token_amount::<1>(2000),
+ token_atoms::<0>(80),
+ token_atoms::<1>(8000),
+ token_atoms::<0>(20),
+ token_atoms::<1>(2000),
None,
],
failed_parsings: vec![],
diff --git a/chronik/bitcoinsuite-slp/tests/test_verify_slp_burn.rs b/chronik/bitcoinsuite-slp/tests/test_verify_slp_burn.rs
--- a/chronik/bitcoinsuite-slp/tests/test_verify_slp_burn.rs
+++ b/chronik/bitcoinsuite-slp/tests/test_verify_slp_burn.rs
@@ -6,7 +6,7 @@
slp::burn_opreturn,
structs::TxType,
test_helpers::{
- empty_entry, meta_slp as meta, spent_amount, spent_baton, verify,
+ empty_entry, meta_slp as meta, spent_atoms, spent_baton, verify,
TOKEN_ID2, TOKEN_ID3,
},
token_tx::{TokenTx, TokenTxEntry},
@@ -20,20 +20,20 @@
assert_eq!(
verify::<1>(
burn_opreturn(&TOKEN_ID2, token_type, 10),
- &[spent_amount(meta(TOKEN_ID3, token_type), 10)],
+ &[spent_atoms(meta(TOKEN_ID3, token_type), 10)],
),
TokenTx {
entries: vec![
TokenTxEntry {
meta: meta(TOKEN_ID2, token_type),
tx_type: Some(TxType::BURN),
- intentional_burn_amount: Some(10),
+ intentional_burn_atoms: Some(10),
..empty_entry()
},
TokenTxEntry {
meta: meta(TOKEN_ID3, token_type),
is_invalid: true,
- actual_burn_amount: 10,
+ actual_burn_atoms: 10,
..empty_entry()
},
],
@@ -54,20 +54,20 @@
assert_eq!(
verify::<1>(
burn_opreturn(&TOKEN_ID2, wrong_token_type, 10),
- &[spent_amount(meta(TOKEN_ID3, token_type), 10)],
+ &[spent_atoms(meta(TOKEN_ID3, token_type), 10)],
),
TokenTx {
entries: vec![
TokenTxEntry {
meta: meta(TOKEN_ID2, wrong_token_type),
tx_type: Some(TxType::BURN),
- intentional_burn_amount: Some(10),
+ intentional_burn_atoms: Some(10),
..empty_entry()
},
TokenTxEntry {
meta: meta(TOKEN_ID3, token_type),
is_invalid: true,
- actual_burn_amount: 10,
+ actual_burn_atoms: 10,
..empty_entry()
},
],
@@ -91,7 +91,7 @@
entries: vec![TokenTxEntry {
meta: meta(TOKEN_ID2, token_type),
tx_type: Some(TxType::BURN),
- intentional_burn_amount: Some(10),
+ intentional_burn_atoms: Some(10),
is_invalid: true,
burns_mint_batons: true,
..empty_entry()
@@ -109,14 +109,14 @@
assert_eq!(
verify::<1>(
burn_opreturn(&TOKEN_ID2, token_type, 10),
- &[spent_amount(meta(TOKEN_ID2, token_type), 9)],
+ &[spent_atoms(meta(TOKEN_ID2, token_type), 9)],
),
TokenTx {
entries: vec![TokenTxEntry {
meta: meta(TOKEN_ID2, token_type),
tx_type: Some(TxType::BURN),
- intentional_burn_amount: Some(10),
- actual_burn_amount: 9,
+ intentional_burn_atoms: Some(10),
+ actual_burn_atoms: 9,
..empty_entry()
},],
outputs: vec![None, None],
@@ -132,14 +132,14 @@
assert_eq!(
verify::<1>(
burn_opreturn(&TOKEN_ID2, token_type, 10),
- &[spent_amount(meta(TOKEN_ID2, token_type), 10)],
+ &[spent_atoms(meta(TOKEN_ID2, token_type), 10)],
),
TokenTx {
entries: vec![TokenTxEntry {
meta: meta(TOKEN_ID2, token_type),
tx_type: Some(TxType::BURN),
- intentional_burn_amount: Some(10),
- actual_burn_amount: 10,
+ intentional_burn_atoms: Some(10),
+ actual_burn_atoms: 10,
..empty_entry()
},],
outputs: vec![None, None],
diff --git a/chronik/bitcoinsuite-slp/tests/test_verify_slp_common.rs b/chronik/bitcoinsuite-slp/tests/test_verify_slp_common.rs
--- a/chronik/bitcoinsuite-slp/tests/test_verify_slp_common.rs
+++ b/chronik/bitcoinsuite-slp/tests/test_verify_slp_common.rs
@@ -9,7 +9,7 @@
slp::{self, send_opreturn},
structs::TxType,
test_helpers::{
- empty_entry, meta_slp as meta, spent_amount, token_amount, verify,
+ empty_entry, meta_slp as meta, spent_atoms, token_atoms, verify,
TOKEN_ID2,
},
token_tx::{TokenTx, TokenTxEntry},
@@ -39,7 +39,7 @@
// More than 32767 inputs allowed in SLP
let spent_tokens =
vec![
- spent_amount(meta(TOKEN_ID2, token_type), u64::MAX);
+ spent_atoms(meta(TOKEN_ID2, token_type), u64::MAX);
alp::consts::MAX_TX_INPUTS + 1
];
assert_eq!(
@@ -51,10 +51,10 @@
entries: vec![TokenTxEntry {
meta: meta(TOKEN_ID2, token_type),
tx_type: Some(TxType::SEND),
- actual_burn_amount: 32767 * u64::MAX as u128,
+ actual_burn_atoms: 32767 * u64::MAX as u128,
..empty_entry()
}],
- outputs: vec![None, token_amount::<0>(u64::MAX)],
+ outputs: vec![None, token_atoms::<0>(u64::MAX)],
failed_parsings: vec![],
},
);
@@ -67,17 +67,17 @@
assert_eq!(
verify::<1>(
send_opreturn(&TOKEN_ID2, token_type, &[100, 200, 300]),
- &[spent_amount(meta(TOKEN_ID2, token_type), 600)],
+ &[spent_atoms(meta(TOKEN_ID2, token_type), 600)],
),
TokenTx {
entries: vec![TokenTxEntry {
meta: meta(TOKEN_ID2, token_type),
tx_type: Some(TxType::SEND),
- actual_burn_amount: 500,
+ actual_burn_atoms: 500,
has_colored_out_of_range: true,
..empty_entry()
}],
- outputs: vec![None, token_amount::<0>(100)],
+ outputs: vec![None, token_atoms::<0>(100)],
failed_parsings: vec![],
},
);
diff --git a/chronik/bitcoinsuite-slp/tests/test_verify_slp_genesis.rs b/chronik/bitcoinsuite-slp/tests/test_verify_slp_genesis.rs
--- a/chronik/bitcoinsuite-slp/tests/test_verify_slp_genesis.rs
+++ b/chronik/bitcoinsuite-slp/tests/test_verify_slp_genesis.rs
@@ -7,7 +7,7 @@
slp::genesis_opreturn,
structs::{GenesisInfo, TxType},
test_helpers::{
- empty_entry, meta_alp, meta_slp as meta, spent_amount, token_amount,
+ empty_entry, meta_alp, meta_slp as meta, spent_atoms, token_atoms,
token_baton, verify, INFO_SLP as INFO, TOKEN_ID1, TOKEN_ID2, TOKEN_ID3,
},
token_tx::{TokenTx, TokenTxEntry},
@@ -42,7 +42,7 @@
assert_eq!(
verify::<1>(
genesis_opreturn(&INFO, Nft1Child, None, 1),
- &[spent_amount(meta(TOKEN_ID2, in_token_type), 1)],
+ &[spent_atoms(meta(TOKEN_ID2, in_token_type), 1)],
),
TokenTx {
entries: vec![
@@ -56,7 +56,7 @@
},
TokenTxEntry {
meta: meta(TOKEN_ID2, in_token_type),
- actual_burn_amount: 1,
+ actual_burn_atoms: 1,
is_invalid: true,
..empty_entry()
},
@@ -74,7 +74,7 @@
verify::<1>(
genesis_opreturn(&INFO, Nft1Child, None, 1),
// NFT1 Group must be at idx 0, is at idx 1
- &[None, spent_amount(meta(TOKEN_ID2, Nft1Group), 1)],
+ &[None, spent_atoms(meta(TOKEN_ID2, Nft1Group), 1)],
),
TokenTx {
entries: vec![
@@ -88,7 +88,7 @@
},
TokenTxEntry {
meta: meta(TOKEN_ID2, Nft1Group),
- actual_burn_amount: 1,
+ actual_burn_atoms: 1,
is_invalid: true,
..empty_entry()
},
@@ -104,7 +104,7 @@
assert_eq!(
verify::<1>(
genesis_opreturn(&INFO, Nft1Child, None, 1),
- &[spent_amount(meta(TOKEN_ID2, Nft1Group), 1)],
+ &[spent_atoms(meta(TOKEN_ID2, Nft1Group), 1)],
),
TokenTx {
entries: vec![
@@ -120,7 +120,7 @@
..empty_entry()
},
],
- outputs: vec![None, token_amount::<0>(1)],
+ outputs: vec![None, token_atoms::<0>(1)],
failed_parsings: vec![],
},
);
@@ -141,7 +141,7 @@
genesis_info: Some(INFO.clone()),
..empty_entry()
}],
- outputs: vec![None, token_amount::<0>(99), token_baton::<0>()],
+ outputs: vec![None, token_atoms::<0>(99), token_baton::<0>()],
failed_parsings: vec![],
},
);
@@ -163,8 +163,8 @@
verify::<3>(
genesis_opreturn(&genesis_info, out_token_type, Some(3), 55),
&[
- spent_amount(meta(TOKEN_ID2, Fungible), 77),
- spent_amount(meta_alp(TOKEN_ID3), 22),
+ spent_atoms(meta(TOKEN_ID2, Fungible), 77),
+ spent_atoms(meta_alp(TOKEN_ID3), 22),
],
),
TokenTx {
@@ -177,20 +177,20 @@
},
TokenTxEntry {
meta: meta(TOKEN_ID2, Fungible),
- actual_burn_amount: 77,
+ actual_burn_atoms: 77,
is_invalid: true,
..empty_entry()
},
TokenTxEntry {
meta: meta_alp(TOKEN_ID3),
- actual_burn_amount: 22,
+ actual_burn_atoms: 22,
is_invalid: true,
..empty_entry()
},
],
outputs: vec![
None,
- token_amount::<0>(55),
+ token_atoms::<0>(55),
None,
token_baton::<0>().filter(|_| out_token_type != MintVault),
],
@@ -206,9 +206,9 @@
verify::<1>(
genesis_opreturn(&INFO, Nft1Child, None, 1),
&[
- spent_amount(meta(TOKEN_ID2, Nft1Group), 77),
- spent_amount(meta(TOKEN_ID2, Nft1Group), 44),
- spent_amount(meta_alp(TOKEN_ID3), 22),
+ spent_atoms(meta(TOKEN_ID2, Nft1Group), 77),
+ spent_atoms(meta(TOKEN_ID2, Nft1Group), 44),
+ spent_atoms(meta_alp(TOKEN_ID3), 22),
],
),
TokenTx {
@@ -222,18 +222,18 @@
},
TokenTxEntry {
meta: meta(TOKEN_ID2, Nft1Group),
- actual_burn_amount: 44,
+ actual_burn_atoms: 44,
is_invalid: true,
..empty_entry()
},
TokenTxEntry {
meta: meta_alp(TOKEN_ID3),
- actual_burn_amount: 22,
+ actual_burn_atoms: 22,
is_invalid: true,
..empty_entry()
},
],
- outputs: vec![None, token_amount::<0>(1),],
+ outputs: vec![None, token_atoms::<0>(1),],
failed_parsings: vec![],
},
);
diff --git a/chronik/bitcoinsuite-slp/tests/test_verify_slp_mint.rs b/chronik/bitcoinsuite-slp/tests/test_verify_slp_mint.rs
--- a/chronik/bitcoinsuite-slp/tests/test_verify_slp_mint.rs
+++ b/chronik/bitcoinsuite-slp/tests/test_verify_slp_mint.rs
@@ -6,8 +6,8 @@
slp::mint_opreturn,
structs::TxType,
test_helpers::{
- empty_entry, meta_alp, meta_slp as meta, spent_amount, spent_baton,
- token_amount, token_baton, verify, TOKEN_ID2, TOKEN_ID3, TOKEN_ID4,
+ empty_entry, meta_alp, meta_slp as meta, spent_atoms, spent_baton,
+ token_atoms, token_baton, verify, TOKEN_ID2, TOKEN_ID3, TOKEN_ID4,
},
token_tx::{TokenTx, TokenTxEntry},
token_type::SlpTokenType::*,
@@ -41,14 +41,14 @@
assert_eq!(
verify::<1>(
mint_opreturn(&TOKEN_ID2, token_type, None, 44),
- &[spent_amount(meta(TOKEN_ID2, token_type), 77)],
+ &[spent_atoms(meta(TOKEN_ID2, token_type), 77)],
),
TokenTx {
entries: vec![TokenTxEntry {
meta: meta(TOKEN_ID2, token_type),
tx_type: Some(TxType::MINT),
is_invalid: true,
- actual_burn_amount: 77,
+ actual_burn_atoms: 77,
burn_error: Some(BurnError::MissingMintBaton),
..empty_entry()
}],
@@ -142,7 +142,7 @@
}],
outputs: vec![
None,
- token_amount::<0>(44),
+ token_atoms::<0>(44),
None,
token_baton::<0>(),
],
@@ -163,7 +163,7 @@
verify::<3>(
mint_opreturn(&TOKEN_ID2, token_type, Some(3), 44),
&[
- spent_amount(meta(TOKEN_ID2, token_type), 77),
+ spent_atoms(meta(TOKEN_ID2, token_type), 77),
spent_baton(meta(TOKEN_ID2, token_type)),
spent_baton(meta(TOKEN_ID2, wrong_token_type)),
spent_baton(meta(TOKEN_ID3, token_type)),
@@ -175,7 +175,7 @@
TokenTxEntry {
meta: meta(TOKEN_ID2, token_type),
tx_type: Some(TxType::MINT),
- actual_burn_amount: 77,
+ actual_burn_atoms: 77,
..empty_entry()
},
TokenTxEntry {
@@ -199,7 +199,7 @@
],
outputs: vec![
None,
- token_amount::<0>(44),
+ token_atoms::<0>(44),
None,
token_baton::<0>(),
],
diff --git a/chronik/bitcoinsuite-slp/tests/test_verify_slp_mint_vault.rs b/chronik/bitcoinsuite-slp/tests/test_verify_slp_mint_vault.rs
--- a/chronik/bitcoinsuite-slp/tests/test_verify_slp_mint_vault.rs
+++ b/chronik/bitcoinsuite-slp/tests/test_verify_slp_mint_vault.rs
@@ -12,8 +12,8 @@
slp::mint_vault_opreturn,
structs::{GenesisInfo, TxType},
test_helpers::{
- empty_entry, meta_alp, meta_slp as meta, spent_amount, spent_baton,
- token_amount, TOKEN_ID2, TOKEN_ID3, TOKEN_ID4, TXID,
+ empty_entry, meta_alp, meta_slp as meta, spent_atoms, spent_baton,
+ token_atoms, TOKEN_ID2, TOKEN_ID3, TOKEN_ID4, TXID,
},
token_tx::{TokenTx, TokenTxEntry},
token_type::SlpTokenType::*,
@@ -31,7 +31,7 @@
TXID,
TxMut {
outputs: [
- [TxOutput { value: 0, script }].as_ref(),
+ [TxOutput { sats: 0, script }].as_ref(),
&vec![TxOutput::default(); N],
]
.concat(),
@@ -137,9 +137,9 @@
}],
outputs: vec![
None,
- token_amount::<0>(30),
+ token_atoms::<0>(30),
None,
- token_amount::<0>(50),
+ token_atoms::<0>(50),
],
failed_parsings: vec![],
},
@@ -167,9 +167,9 @@
}],
outputs: vec![
None,
- token_amount::<0>(30),
+ token_atoms::<0>(30),
None,
- token_amount::<0>(50),
+ token_atoms::<0>(50),
],
failed_parsings: vec![],
},
@@ -182,9 +182,9 @@
verify::<3>(
mint_vault_opreturn(&TOKEN_ID2, [10, 0, 20]),
&[
- spent_amount(meta(TOKEN_ID2, MintVault), 80),
+ spent_atoms(meta(TOKEN_ID2, MintVault), 80),
spent_baton(meta(TOKEN_ID3, Fungible)),
- spent_amount(meta(TOKEN_ID3, Fungible), 800),
+ spent_atoms(meta(TOKEN_ID3, Fungible), 800),
spent_baton(meta_alp(TOKEN_ID4)),
],
&[
@@ -199,14 +199,14 @@
TokenTxEntry {
meta: meta(TOKEN_ID2, MintVault),
tx_type: Some(TxType::MINT),
- actual_burn_amount: 80,
+ actual_burn_atoms: 80,
..empty_entry()
},
TokenTxEntry {
meta: meta(TOKEN_ID3, Fungible),
tx_type: None,
is_invalid: true,
- actual_burn_amount: 800,
+ actual_burn_atoms: 800,
burns_mint_batons: true,
..empty_entry()
},
@@ -220,9 +220,9 @@
],
outputs: vec![
None,
- token_amount::<0>(10),
+ token_atoms::<0>(10),
None,
- token_amount::<0>(20),
+ token_atoms::<0>(20),
],
failed_parsings: vec![],
},
diff --git a/chronik/bitcoinsuite-slp/tests/test_verify_slp_send.rs b/chronik/bitcoinsuite-slp/tests/test_verify_slp_send.rs
--- a/chronik/bitcoinsuite-slp/tests/test_verify_slp_send.rs
+++ b/chronik/bitcoinsuite-slp/tests/test_verify_slp_send.rs
@@ -6,8 +6,8 @@
slp::send_opreturn,
structs::TxType,
test_helpers::{
- empty_entry, meta_slp as meta, spent_amount, spent_amount_group,
- spent_baton, token_amount, verify, TOKEN_ID2, TOKEN_ID3, TOKEN_ID4,
+ empty_entry, meta_slp as meta, spent_atoms, spent_atoms_group,
+ spent_baton, token_atoms, verify, TOKEN_ID2, TOKEN_ID3, TOKEN_ID4,
},
token_tx::{TokenTx, TokenTxEntry},
token_type::SlpTokenType::*,
@@ -44,14 +44,14 @@
assert_eq!(
verify::<1>(
send_opreturn(&TOKEN_ID2, token_type, &[7]),
- &[spent_amount(meta(TOKEN_ID2, token_type), 5)],
+ &[spent_atoms(meta(TOKEN_ID2, token_type), 5)],
),
TokenTx {
entries: vec![TokenTxEntry {
meta: meta(TOKEN_ID2, token_type),
tx_type: Some(TxType::SEND),
is_invalid: true,
- actual_burn_amount: 5,
+ actual_burn_atoms: 5,
burn_error: Some(BurnError::InsufficientInputSum {
required: 7,
actual: 5,
@@ -71,7 +71,7 @@
assert_eq!(
verify::<1>(
send_opreturn(&TOKEN_ID2, token_type, &[7]),
- &[spent_amount(meta(TOKEN_ID3, token_type), 7)],
+ &[spent_atoms(meta(TOKEN_ID3, token_type), 7)],
),
TokenTx {
entries: vec![
@@ -87,7 +87,7 @@
},
TokenTxEntry {
meta: meta(TOKEN_ID3, token_type),
- actual_burn_amount: 7,
+ actual_burn_atoms: 7,
is_invalid: true,
..empty_entry()
},
@@ -109,7 +109,7 @@
assert_eq!(
verify::<1>(
send_opreturn(&TOKEN_ID2, wrong_token_type, &[7]),
- &[spent_amount(meta(TOKEN_ID2, token_type), 7)],
+ &[spent_atoms(meta(TOKEN_ID2, token_type), 7)],
),
TokenTx {
entries: vec![
@@ -125,7 +125,7 @@
},
TokenTxEntry {
meta: meta(TOKEN_ID2, token_type),
- actual_burn_amount: 7,
+ actual_burn_atoms: 7,
is_invalid: true,
..empty_entry()
},
@@ -149,11 +149,11 @@
&[1, 0xffff_ffff_ffff_0000, 0xffff_ffff_ffff_0001, 2],
),
&[
- spent_amount(
+ spent_atoms(
meta(TOKEN_ID2, token_type),
0xffff_ffff_ffff_0000,
),
- spent_amount(
+ spent_atoms(
meta(TOKEN_ID2, token_type),
0xffff_ffff_ffff_0003,
),
@@ -164,7 +164,7 @@
meta: meta(TOKEN_ID2, token_type),
tx_type: Some(TxType::SEND),
is_invalid: true,
- actual_burn_amount: 0x1fffffffffffe0003,
+ actual_burn_atoms: 0x1fffffffffffe0003,
burn_error: Some(BurnError::InsufficientInputSum {
required: 0x1fffffffffffe0004,
actual: 0x1fffffffffffe0003,
@@ -202,7 +202,7 @@
assert_eq!(
verify::<4>(
send_opreturn(&TOKEN_ID2, token_type, &[1, 2, 0, 4]),
- &[spent_amount(meta(TOKEN_ID2, token_type), 7)],
+ &[spent_atoms(meta(TOKEN_ID2, token_type), 7)],
),
TokenTx {
entries: vec![TokenTxEntry {
@@ -212,10 +212,10 @@
}],
outputs: vec![
None,
- token_amount::<0>(1),
- token_amount::<0>(2),
+ token_atoms::<0>(1),
+ token_atoms::<0>(2),
None,
- token_amount::<0>(4),
+ token_atoms::<0>(4),
],
failed_parsings: vec![],
},
@@ -229,7 +229,7 @@
assert_eq!(
verify::<1>(
send_opreturn(&TOKEN_ID2, Nft1Child, &[1]),
- &[spent_amount_group(
+ &[spent_atoms_group(
meta(TOKEN_ID3, Nft1Child),
1,
meta(TOKEN_ID4, Nft1Group),
@@ -253,7 +253,7 @@
tx_type: None,
// group still carried over even when burned
group_token_meta: Some(meta(TOKEN_ID4, Nft1Group)),
- actual_burn_amount: 1,
+ actual_burn_atoms: 1,
is_invalid: true,
..empty_entry()
},
@@ -266,7 +266,7 @@
assert_eq!(
verify::<1>(
send_opreturn(&TOKEN_ID2, Fungible, &[1]),
- &[spent_amount_group(
+ &[spent_atoms_group(
meta(TOKEN_ID2, Nft1Child),
1,
meta(TOKEN_ID3, Nft1Group),
@@ -287,7 +287,7 @@
TokenTxEntry {
meta: meta(TOKEN_ID2, Nft1Child),
group_token_meta: Some(meta(TOKEN_ID3, Nft1Group)),
- actual_burn_amount: 1,
+ actual_burn_atoms: 1,
is_invalid: true,
..empty_entry()
},
@@ -304,7 +304,7 @@
assert_eq!(
verify::<4>(
send_opreturn(&TOKEN_ID2, Nft1Child, &[1, 2, 0, 4]),
- &[spent_amount_group(
+ &[spent_atoms_group(
meta(TOKEN_ID2, Nft1Child),
7,
meta(TOKEN_ID3, Nft1Group),
@@ -319,10 +319,10 @@
}],
outputs: vec![
None,
- token_amount::<0>(1),
- token_amount::<0>(2),
+ token_atoms::<0>(1),
+ token_atoms::<0>(2),
None,
- token_amount::<0>(4),
+ token_atoms::<0>(4),
],
failed_parsings: vec![],
},
@@ -334,7 +334,7 @@
assert_eq!(
verify::<4>(
send_opreturn(&TOKEN_ID2, Nft1Child, &[1, 2, 0, 4]),
- &[spent_amount_group(
+ &[spent_atoms_group(
meta(TOKEN_ID2, Nft1Child),
7,
meta(TOKEN_ID3, Nft1Group)
@@ -349,10 +349,10 @@
}],
outputs: vec![
None,
- token_amount::<0>(1),
- token_amount::<0>(2),
+ token_atoms::<0>(1),
+ token_atoms::<0>(2),
None,
- token_amount::<0>(4),
+ token_atoms::<0>(4),
],
failed_parsings: vec![],
},
@@ -370,11 +370,11 @@
&[1, 0xffff_ffff_ffff_0000, 0xffff_ffff_ffff_0001, 2],
),
&[
- spent_amount(
+ spent_atoms(
meta(TOKEN_ID2, token_type),
0xffff_ffff_ffff_0000,
),
- spent_amount(
+ spent_atoms(
meta(TOKEN_ID2, token_type),
0xffff_ffff_ffff_0004,
),
@@ -388,10 +388,10 @@
}],
outputs: vec![
None,
- token_amount::<0>(1),
- token_amount::<0>(0xffff_ffff_ffff_0000),
- token_amount::<0>(0xffff_ffff_ffff_0001),
- token_amount::<0>(2),
+ token_atoms::<0>(1),
+ token_atoms::<0>(0xffff_ffff_ffff_0000),
+ token_atoms::<0>(0xffff_ffff_ffff_0001),
+ token_atoms::<0>(2),
],
failed_parsings: vec![],
},
@@ -410,20 +410,20 @@
&[0xffff_ffff_ffff_0000, 0xffff_ffff_ffff_0002, 1],
),
&[
- spent_amount(
+ spent_atoms(
meta(TOKEN_ID2, token_type),
0xffff_ffff_ffff_0000,
),
- spent_amount(
+ spent_atoms(
meta(TOKEN_ID2, token_type),
0xefff_ffff_ffff_0000,
),
- spent_amount(
+ spent_atoms(
meta(TOKEN_ID2, token_type),
0x2fff_ffff_ffff_0000,
),
- spent_amount(meta(TOKEN_ID2, token_type), 10),
- spent_amount(meta(TOKEN_ID3, Nft1Child), 1),
+ spent_atoms(meta(TOKEN_ID2, token_type), 10),
+ spent_atoms(meta(TOKEN_ID3, Nft1Child), 1),
spent_baton(meta(TOKEN_ID4, Fungible)),
],
),
@@ -432,13 +432,13 @@
TokenTxEntry {
meta: meta(TOKEN_ID2, token_type),
tx_type: Some(TxType::SEND),
- actual_burn_amount: 0x1fff_ffff_ffff_0007,
+ actual_burn_atoms: 0x1fff_ffff_ffff_0007,
..empty_entry()
},
TokenTxEntry {
meta: meta(TOKEN_ID3, Nft1Child),
tx_type: None,
- actual_burn_amount: 1,
+ actual_burn_atoms: 1,
is_invalid: true,
..empty_entry()
},
@@ -452,9 +452,9 @@
],
outputs: vec![
None,
- token_amount::<0>(0xffff_ffff_ffff_0000),
- token_amount::<0>(0xffff_ffff_ffff_0002),
- token_amount::<0>(1),
+ token_atoms::<0>(0xffff_ffff_ffff_0000),
+ token_atoms::<0>(0xffff_ffff_ffff_0002),
+ token_atoms::<0>(1),
],
failed_parsings: vec![],
},
diff --git a/chronik/chronik-bridge/src/ffi.rs b/chronik/chronik-bridge/src/ffi.rs
--- a/chronik/chronik-bridge/src/ffi.rs
+++ b/chronik/chronik-bridge/src/ffi.rs
@@ -110,7 +110,7 @@
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct TxOutput {
/// Value of the output.
- pub value: i64,
+ pub sats: i64,
/// Script locking the output.
pub script: Vec<u8>,
}
diff --git a/chronik/chronik-bridge/src/util.rs b/chronik/chronik-bridge/src/util.rs
--- a/chronik/chronik-bridge/src/util.rs
+++ b/chronik/chronik-bridge/src/util.rs
@@ -89,7 +89,7 @@
impl From<ffi::TxOutput> for TxOutput {
fn from(output: ffi::TxOutput) -> Self {
TxOutput {
- value: output.value,
+ sats: output.sats,
script: Script::new(output.script.into()),
}
}
@@ -98,7 +98,7 @@
impl From<TxOutput> for ffi::TxOutput {
fn from(output: TxOutput) -> Self {
ffi::TxOutput {
- value: output.value,
+ sats: output.sats,
script: output.script.to_vec(),
}
}
diff --git a/chronik/chronik-cpp/chronik_bridge.cpp b/chronik/chronik-cpp/chronik_bridge.cpp
--- a/chronik/chronik-cpp/chronik_bridge.cpp
+++ b/chronik/chronik-cpp/chronik_bridge.cpp
@@ -35,7 +35,7 @@
chronik_bridge::TxOutput BridgeTxOutput(const CTxOut &output) {
return {
- .value = output.nValue / Amount::satoshi(),
+ .sats = output.nValue / Amount::satoshi(),
.script = chronik::util::ToRustVec<uint8_t>(output.scriptPubKey),
};
}
diff --git a/chronik/chronik-db/src/group.rs b/chronik/chronik-db/src/group.rs
--- a/chronik/chronik-db/src/group.rs
+++ b/chronik/chronik-db/src/group.rs
@@ -133,24 +133,24 @@
fn from_output(output: &TxOutput) -> Self;
}
-/// [`UtxoData`] that only stores the output value but not the script.
+/// [`UtxoData`] that only stores the output sats but not the script.
/// This is useful where the member itself is the script so storing it would be
/// redundant.
pub type UtxoDataValue = i64;
impl UtxoData for UtxoDataValue {
fn from_output(output: &TxOutput) -> Self {
- output.value
+ output.sats
}
}
-/// [`UtxoData`] that stores the full output, including value and script.
+/// [`UtxoData`] that stores the full output, including sats and script.
/// This is useful where the member isn't the script, e.g. a token ID.
pub type UtxoDataOutput = (i64, Bytes);
impl UtxoData for UtxoDataOutput {
fn from_output(output: &TxOutput) -> Self {
- (output.value, output.script.bytecode().clone())
+ (output.sats, output.script.bytecode().clone())
}
}
diff --git a/chronik/chronik-db/src/io/block_stats.rs b/chronik/chronik-db/src/io/block_stats.rs
--- a/chronik/chronik-db/src/io/block_stats.rs
+++ b/chronik/chronik-db/src/io/block_stats.rs
@@ -86,18 +86,18 @@
for tx in txs {
for output in &tx.tx.outputs {
if output.script.is_opreturn() {
- sum_burned_sats += output.value;
+ sum_burned_sats += output.sats;
}
}
let tx_output_sats =
- tx.tx.outputs.iter().map(|output| output.value).sum::<i64>();
+ tx.tx.outputs.iter().map(|output| output.sats).sum::<i64>();
if tx.is_coinbase {
sum_coinbase_output_sats += tx_output_sats;
} else {
sum_normal_output_sats += tx_output_sats;
for input in &tx.tx.inputs {
if let Some(coin) = input.coin.as_ref() {
- sum_input_sats += coin.output.value;
+ sum_input_sats += coin.output.sats;
}
}
}
@@ -216,7 +216,7 @@
.inputs
.clone(),
outputs: vec![TxOutput {
- value: 60,
+ sats: 60,
script: {
let mut script = ScriptMut::default();
script.put_opcodes([OP_RETURN, OP_1]);
diff --git a/chronik/chronik-db/src/io/token/batch.rs b/chronik/chronik-db/src/io/token/batch.rs
--- a/chronik/chronik-db/src/io/token/batch.rs
+++ b/chronik/chronik-db/src/io/token/batch.rs
@@ -454,8 +454,8 @@
return DbToken::NoToken;
};
match token.variant {
- TokenVariant::Amount(amount) => {
- DbToken::Amount(meta_idx(&token.meta, metas), amount)
+ TokenVariant::Atoms(atoms) => {
+ DbToken::Atoms(meta_idx(&token.meta, metas), atoms)
}
TokenVariant::MintBaton => {
DbToken::MintBaton(meta_idx(&token.meta, metas))
diff --git a/chronik/chronik-db/src/io/token/data.rs b/chronik/chronik-db/src/io/token/data.rs
--- a/chronik/chronik-db/src/io/token/data.rs
+++ b/chronik/chronik-db/src/io/token/data.rs
@@ -7,7 +7,7 @@
use std::collections::BTreeMap;
use bitcoinsuite_slp::{
- structs::{Amount, Token, TokenMeta, TokenVariant},
+ structs::{Atoms, Token, TokenMeta, TokenVariant},
token_id::TokenId,
token_type::{AlpTokenType, SlpTokenType, TokenType},
verify::SpentToken,
@@ -49,8 +49,8 @@
pub enum DbToken {
/// No token value
NoToken,
- /// Token amount
- Amount(TokenIdx, Amount),
+ /// Token amount in atoms (base tokens)
+ Atoms(TokenIdx, Atoms),
/// Mint baton
MintBaton(TokenIdx),
/// Unknown SLP token
@@ -84,7 +84,7 @@
) -> Result<Option<SpentToken>, E> {
let (token_num_idx, variant) = match *db_token {
DbToken::NoToken => return Ok(None),
- DbToken::Amount(idx, amount) => (idx, TokenVariant::Amount(amount)),
+ DbToken::Atoms(idx, atoms) => (idx, TokenVariant::Atoms(atoms)),
DbToken::MintBaton(idx) => (idx, TokenVariant::MintBaton),
DbToken::UnknownSlp(byte) | DbToken::UnknownAlp(byte) => {
let token_type = match *db_token {
@@ -135,7 +135,7 @@
/// Create a new [`DbToken`] but with a different index.
pub fn with_idx(&self, idx: TokenIdx) -> Self {
match *self {
- DbToken::Amount(_, amount) => DbToken::Amount(idx, amount),
+ DbToken::Atoms(_, atoms) => DbToken::Atoms(idx, atoms),
DbToken::MintBaton(_) => DbToken::MintBaton(idx),
_ => *self,
}
@@ -146,7 +146,7 @@
pub fn token_num_idx(&self) -> Option<TokenIdx> {
match *self {
DbToken::NoToken => None,
- DbToken::Amount(idx, _) => Some(idx),
+ DbToken::Atoms(idx, _) => Some(idx),
DbToken::MintBaton(idx) => Some(idx),
DbToken::UnknownSlp(_) => None,
DbToken::UnknownAlp(_) => None,
diff --git a/chronik/chronik-db/src/io/token/tests/mock.rs b/chronik/chronik-db/src/io/token/tests/mock.rs
--- a/chronik/chronik-db/src/io/token/tests/mock.rs
+++ b/chronik/chronik-db/src/io/token/tests/mock.rs
@@ -7,7 +7,7 @@
script::Script,
tx::{Coin, OutPoint, Tx, TxId, TxInput, TxMut, TxOutput},
};
-use bitcoinsuite_slp::{structs::Amount, token_id::TokenId};
+use bitcoinsuite_slp::{structs::Atoms, token_id::TokenId};
use rocksdb::WriteBatch;
use crate::{
@@ -28,8 +28,8 @@
TokenId::new(TxId::new([num; 32]))
}
-pub(crate) fn db_amount<const N: u32>(amount: Amount) -> DbToken {
- DbToken::Amount(N, amount)
+pub(crate) fn db_atoms<const N: u32>(atoms: Atoms) -> DbToken {
+ DbToken::Atoms(N, atoms)
}
pub(crate) fn db_baton<const N: u32>() -> DbToken {
@@ -55,7 +55,7 @@
},
coin: Some(Coin {
output: TxOutput {
- value: 0,
+ sats: 0,
script: Script::EMPTY,
},
..Default::default()
@@ -64,7 +64,7 @@
})
.collect(),
outputs: [TxOutput {
- value: 0,
+ sats: 0,
script: op_return_script,
}]
.into_iter()
@@ -93,14 +93,14 @@
out_idx,
},
coin: Some(Coin {
- output: TxOutput { value: 0, script },
+ output: TxOutput { sats: 0, script },
..Default::default()
}),
..Default::default()
})
.collect(),
outputs: [TxOutput {
- value: 0,
+ sats: 0,
script: op_return_script,
}]
.into_iter()
diff --git a/chronik/chronik-db/src/io/token/tests/test_batch_alp.rs b/chronik/chronik-db/src/io/token/tests/test_batch_alp.rs
--- a/chronik/chronik-db/src/io/token/tests/test_batch_alp.rs
+++ b/chronik/chronik-db/src/io/token/tests/test_batch_alp.rs
@@ -14,7 +14,7 @@
use pretty_assertions::assert_eq;
use crate::io::token::{
- tests::mock::{db_amount, db_baton, make_tx, token_id, MockTokenDb},
+ tests::mock::{db_atoms, db_baton, make_tx, token_id, MockTokenDb},
DbToken::NoToken,
DbTokenTx, TokenReader,
};
@@ -86,9 +86,9 @@
inputs: vec![],
outputs: vec![
NoToken,
- db_amount::<0>(1),
+ db_atoms::<0>(1),
NoToken,
- db_amount::<0>(3),
+ db_atoms::<0>(3),
db_baton::<0>(),
db_baton::<0>(),
],
@@ -105,10 +105,10 @@
inputs: vec![db_baton::<0>()],
outputs: vec![
NoToken,
- db_amount::<0>(4),
+ db_atoms::<0>(4),
NoToken,
NoToken,
- db_amount::<0>(5),
+ db_atoms::<0>(5),
db_baton::<0>(),
],
..Default::default()
@@ -121,12 +121,12 @@
token_reader.token_tx(3)?,
Some(DbTokenTx {
token_tx_nums: vec![1],
- inputs: vec![db_amount::<0>(3), db_amount::<0>(4)],
+ inputs: vec![db_atoms::<0>(3), db_atoms::<0>(4)],
outputs: vec![
NoToken,
- db_amount::<0>(1),
+ db_atoms::<0>(1),
NoToken,
- db_amount::<0>(6),
+ db_atoms::<0>(6),
],
..Default::default()
}),
@@ -166,12 +166,12 @@
token_reader.token_tx(5)?,
Some(DbTokenTx {
token_tx_nums: vec![5, 1],
- inputs: vec![db_baton::<1>(), db_amount::<1>(1)],
+ inputs: vec![db_baton::<1>(), db_atoms::<1>(1)],
outputs: vec![
NoToken,
- db_amount::<0>(100),
- db_amount::<1>(4),
- db_amount::<0>(200),
+ db_atoms::<0>(100),
+ db_atoms::<1>(4),
+ db_atoms::<0>(200),
db_baton::<0>(),
db_baton::<1>(),
],
diff --git a/chronik/chronik-db/src/io/token/tests/test_batch_burn.rs b/chronik/chronik-db/src/io/token/tests/test_batch_burn.rs
--- a/chronik/chronik-db/src/io/token/tests/test_batch_burn.rs
+++ b/chronik/chronik-db/src/io/token/tests/test_batch_burn.rs
@@ -14,7 +14,7 @@
use pretty_assertions::assert_eq;
use crate::io::token::{
- tests::mock::{db_amount, make_tx, token_id, MockTokenDb},
+ tests::mock::{db_atoms, make_tx, token_id, MockTokenDb},
DbToken::NoToken,
DbTokenTx, TokenReader,
};
@@ -78,7 +78,7 @@
token_reader.token_tx(5)?,
Some(DbTokenTx {
token_tx_nums: vec![3],
- inputs: vec![db_amount::<0>(1000)],
+ inputs: vec![db_atoms::<0>(1000)],
outputs: vec![NoToken],
..Default::default()
}),
@@ -88,7 +88,7 @@
token_reader.token_tx(6)?,
Some(DbTokenTx {
token_tx_nums: vec![4],
- inputs: vec![db_amount::<0>(1000)],
+ inputs: vec![db_atoms::<0>(1000)],
outputs: vec![NoToken],
..Default::default()
}),
diff --git a/chronik/chronik-db/src/io/token/tests/test_batch_common.rs b/chronik/chronik-db/src/io/token/tests/test_batch_common.rs
--- a/chronik/chronik-db/src/io/token/tests/test_batch_common.rs
+++ b/chronik/chronik-db/src/io/token/tests/test_batch_common.rs
@@ -15,7 +15,7 @@
use pretty_assertions::assert_eq;
use crate::io::token::{
- tests::mock::{db_amount, db_baton, make_tx, token_id, MockTokenDb},
+ tests::mock::{db_atoms, db_baton, make_tx, token_id, MockTokenDb},
BatchError,
DbToken::NoToken,
DbTokenTx, TokenReader,
@@ -165,7 +165,7 @@
Some(DbTokenTx {
token_tx_nums: vec![4],
inputs: vec![],
- outputs: vec![NoToken, db_amount::<0>(1000), db_baton::<0>()],
+ outputs: vec![NoToken, db_atoms::<0>(1000), db_baton::<0>()],
..Default::default()
}),
);
@@ -179,8 +179,8 @@
token_reader.token_tx(2)?,
Some(DbTokenTx {
token_tx_nums: vec![4],
- inputs: vec![db_amount::<0>(1000)],
- outputs: vec![NoToken, db_amount::<0>(400)],
+ inputs: vec![db_atoms::<0>(1000)],
+ outputs: vec![NoToken, db_atoms::<0>(400)],
..Default::default()
}),
);
@@ -191,8 +191,8 @@
token_reader.token_tx(3)?,
Some(DbTokenTx {
token_tx_nums: vec![4],
- inputs: vec![db_amount::<0>(400)],
- outputs: vec![NoToken, db_amount::<0>(300), db_amount::<0>(100),],
+ inputs: vec![db_atoms::<0>(400)],
+ outputs: vec![NoToken, db_atoms::<0>(300), db_atoms::<0>(100),],
..Default::default()
}),
);
@@ -203,7 +203,7 @@
token_reader.token_tx(1)?,
Some(DbTokenTx {
token_tx_nums: vec![4],
- inputs: vec![db_amount::<0>(300)],
+ inputs: vec![db_atoms::<0>(300)],
outputs: vec![NoToken, NoToken, NoToken],
..Default::default()
}),
diff --git a/chronik/chronik-db/src/io/token/tests/test_batch_genesis.rs b/chronik/chronik-db/src/io/token/tests/test_batch_genesis.rs
--- a/chronik/chronik-db/src/io/token/tests/test_batch_genesis.rs
+++ b/chronik/chronik-db/src/io/token/tests/test_batch_genesis.rs
@@ -15,7 +15,7 @@
use pretty_assertions::assert_eq;
use crate::io::token::{
- tests::mock::{db_amount, db_baton, make_tx, token_id, MockTokenDb},
+ tests::mock::{db_atoms, db_baton, make_tx, token_id, MockTokenDb},
DbToken::NoToken,
DbTokenTx, TokenReader,
};
@@ -53,7 +53,7 @@
inputs: vec![],
outputs: vec![
NoToken,
- db_amount::<0>(1000),
+ db_atoms::<0>(1000),
NoToken,
db_baton::<0>(),
],
@@ -109,9 +109,9 @@
inputs: vec![],
outputs: vec![
NoToken,
- db_amount::<0>(100),
+ db_atoms::<0>(100),
NoToken,
- db_amount::<0>(200),
+ db_atoms::<0>(200),
db_baton::<0>(),
db_baton::<0>(),
],
diff --git a/chronik/chronik-db/src/io/token/tests/test_batch_nft.rs b/chronik/chronik-db/src/io/token/tests/test_batch_nft.rs
--- a/chronik/chronik-db/src/io/token/tests/test_batch_nft.rs
+++ b/chronik/chronik-db/src/io/token/tests/test_batch_nft.rs
@@ -13,7 +13,7 @@
use pretty_assertions::assert_eq;
use crate::io::token::{
- tests::mock::{db_amount, db_baton, make_tx, token_id, MockTokenDb},
+ tests::mock::{db_atoms, db_baton, make_tx, token_id, MockTokenDb},
DbToken::NoToken,
DbTokenTx, TokenReader,
};
@@ -69,7 +69,7 @@
Some(DbTokenTx {
token_tx_nums: vec![1],
inputs: vec![],
- outputs: vec![NoToken, db_amount::<0>(1000), db_baton::<0>()],
+ outputs: vec![NoToken, db_atoms::<0>(1000), db_baton::<0>()],
..Default::default()
}),
);
@@ -83,14 +83,14 @@
token_reader.token_tx(2)?,
Some(DbTokenTx {
token_tx_nums: vec![1],
- inputs: vec![db_amount::<0>(1000)],
+ inputs: vec![db_atoms::<0>(1000)],
outputs: vec![
NoToken,
- db_amount::<0>(1),
- db_amount::<0>(1),
- db_amount::<0>(1),
- db_amount::<0>(1),
- db_amount::<0>(996),
+ db_atoms::<0>(1),
+ db_atoms::<0>(1),
+ db_atoms::<0>(1),
+ db_atoms::<0>(1),
+ db_atoms::<0>(996),
],
..Default::default()
}),
@@ -103,8 +103,8 @@
Some(DbTokenTx {
token_tx_nums: vec![3, 1],
group_token_indices: vec![(0, 1)].into_iter().collect(),
- inputs: vec![db_amount::<1>(1)],
- outputs: vec![NoToken, db_amount::<0>(1)],
+ inputs: vec![db_atoms::<1>(1)],
+ outputs: vec![NoToken, db_atoms::<0>(1)],
..Default::default()
}),
);
@@ -119,8 +119,8 @@
Some(DbTokenTx {
token_tx_nums: vec![4, 1],
group_token_indices: vec![(0, 1)].into_iter().collect(),
- inputs: vec![db_amount::<1>(1)],
- outputs: vec![NoToken, db_amount::<0>(1)],
+ inputs: vec![db_atoms::<1>(1)],
+ outputs: vec![NoToken, db_atoms::<0>(1)],
..Default::default()
}),
);
@@ -158,8 +158,8 @@
Some(DbTokenTx {
token_tx_nums: vec![7, 1],
group_token_indices: vec![(0, 1)].into_iter().collect(),
- inputs: vec![db_amount::<1>(1)],
- outputs: vec![NoToken, db_amount::<0>(1)],
+ inputs: vec![db_atoms::<1>(1)],
+ outputs: vec![NoToken, db_atoms::<0>(1)],
..Default::default()
}),
);
@@ -175,10 +175,10 @@
token_tx_nums: vec![1, 3, 7],
group_token_indices: vec![(1, 0), (2, 0)].into_iter().collect(),
inputs: vec![
- db_amount::<0>(1),
- db_amount::<1>(1),
+ db_atoms::<0>(1),
+ db_atoms::<1>(1),
NoToken,
- db_amount::<2>(1)
+ db_atoms::<2>(1)
],
outputs: vec![NoToken, NoToken],
..Default::default()
diff --git a/chronik/chronik-db/src/io/token/tests/test_batch_unknown.rs b/chronik/chronik-db/src/io/token/tests/test_batch_unknown.rs
--- a/chronik/chronik-db/src/io/token/tests/test_batch_unknown.rs
+++ b/chronik/chronik-db/src/io/token/tests/test_batch_unknown.rs
@@ -11,7 +11,7 @@
use pretty_assertions::assert_eq;
use crate::io::token::{
- tests::mock::{db_amount, make_tx, MockTokenDb},
+ tests::mock::{db_atoms, make_tx, MockTokenDb},
DbToken::{NoToken, UnknownAlp, UnknownSlp},
DbTokenTx, TokenReader,
};
@@ -58,7 +58,7 @@
token_reader.token_tx(3)?,
Some(DbTokenTx {
token_tx_nums: vec![1],
- inputs: vec![db_amount::<0>(1000)],
+ inputs: vec![db_atoms::<0>(1000)],
outputs: vec![NoToken, UnknownAlp(0x22), UnknownAlp(0x22)],
..Default::default()
}),
diff --git a/chronik/chronik-db/src/io/token/tests/test_batch_vault.rs b/chronik/chronik-db/src/io/token/tests/test_batch_vault.rs
--- a/chronik/chronik-db/src/io/token/tests/test_batch_vault.rs
+++ b/chronik/chronik-db/src/io/token/tests/test_batch_vault.rs
@@ -14,7 +14,7 @@
use crate::io::token::{
tests::mock::{
- db_amount, make_tx, make_tx_with_scripts, token_id, MockTokenDb,
+ db_atoms, make_tx, make_tx_with_scripts, token_id, MockTokenDb,
},
DbToken::NoToken,
DbTokenTx, TokenReader, FLAGS_HAS_MINT_VAULT,
@@ -68,7 +68,7 @@
Some(DbTokenTx {
token_tx_nums: vec![2],
inputs: vec![NoToken],
- outputs: vec![NoToken, db_amount::<0>(2000)],
+ outputs: vec![NoToken, db_atoms::<0>(2000)],
..Default::default()
}),
);
@@ -106,7 +106,7 @@
Some(DbTokenTx {
token_tx_nums: vec![2],
inputs: vec![NoToken],
- outputs: vec![NoToken, db_amount::<0>(3000), db_amount::<0>(4000)],
+ outputs: vec![NoToken, db_atoms::<0>(3000), db_atoms::<0>(4000)],
flags: FLAGS_HAS_MINT_VAULT,
..Default::default()
}),
@@ -116,8 +116,8 @@
token_reader.token_tx(7)?,
Some(DbTokenTx {
token_tx_nums: vec![2],
- inputs: vec![db_amount::<0>(2000), db_amount::<0>(3000)],
- outputs: vec![NoToken, db_amount::<0>(4500), db_amount::<0>(500)],
+ inputs: vec![db_atoms::<0>(2000), db_atoms::<0>(3000)],
+ outputs: vec![NoToken, db_atoms::<0>(4500), db_atoms::<0>(500)],
flags: 0,
..Default::default()
}),
diff --git a/chronik/chronik-db/src/mem/tokens.rs b/chronik/chronik-db/src/mem/tokens.rs
--- a/chronik/chronik-db/src/mem/tokens.rs
+++ b/chronik/chronik-db/src/mem/tokens.rs
@@ -337,9 +337,9 @@
},
structs::{GenesisInfo, TxType},
test_helpers::{
- empty_entry, meta_slp, spent_amount, spent_amount_group,
- spent_baton, token_amount, token_baton, TOKEN_ID1, TOKEN_ID3,
- TOKEN_ID4, TOKEN_ID5, TOKEN_ID8,
+ empty_entry, meta_slp, spent_atoms, spent_atoms_group, spent_baton,
+ token_atoms, token_baton, TOKEN_ID1, TOKEN_ID3, TOKEN_ID4,
+ TOKEN_ID5, TOKEN_ID8,
},
token_tx::{TokenTx, TokenTxEntry},
token_type::SlpTokenType::*,
@@ -433,7 +433,7 @@
entries: vec![TokenTxEntry {
meta: meta_slp(TOKEN_ID1, Fungible),
tx_type: Some(TxType::BURN),
- intentional_burn_amount: Some(1000),
+ intentional_burn_atoms: Some(1000),
..empty_entry()
}],
outputs: vec![None, None],
@@ -496,7 +496,7 @@
}],
outputs: vec![
None,
- token_amount::<0>(1234),
+ token_atoms::<0>(1234),
token_baton::<0>(),
],
failed_parsings: vec![],
@@ -505,7 +505,7 @@
assert_eq!(mem_tokens().tx_token_inputs(&txid(3)), None);
assert_eq!(
mem_tokens().spent_token(&outpoint(3, 1))?,
- spent_amount(meta_slp(TOKEN_ID3, Fungible), 1234),
+ spent_atoms(meta_slp(TOKEN_ID3, Fungible), 1234),
);
assert_eq!(
mem_tokens().spent_token(&outpoint(3, 2))?,
@@ -539,7 +539,7 @@
genesis_info: Some(genesis_info),
..empty_entry()
}],
- outputs: vec![None, token_amount::<0>(1000)],
+ outputs: vec![None, token_atoms::<0>(1000)],
failed_parsings: vec![],
}),
);
@@ -565,7 +565,7 @@
genesis_info: Some(GenesisInfo::empty_slp()),
..empty_entry()
}],
- outputs: vec![None, token_amount::<0>(10), token_baton::<0>()],
+ outputs: vec![None, token_atoms::<0>(10), token_baton::<0>()],
failed_parsings: vec![],
}),
);
@@ -612,17 +612,17 @@
}],
outputs: vec![
None,
- token_amount::<0>(1),
- token_amount::<0>(2),
- token_amount::<0>(3),
- token_amount::<0>(4),
+ token_atoms::<0>(1),
+ token_atoms::<0>(2),
+ token_atoms::<0>(3),
+ token_atoms::<0>(4),
],
failed_parsings: vec![],
}),
);
assert_eq!(
mem_tokens().tx_token_inputs(&txid(7)),
- Some([spent_amount(meta_slp(TOKEN_ID5, Nft1Group), 10)].as_ref()),
+ Some([spent_atoms(meta_slp(TOKEN_ID5, Nft1Group), 10)].as_ref()),
);
// Tx 8: Valid NFT1 CHILD GENESIS using mempool SEND output, also burn
@@ -651,7 +651,7 @@
TokenTxEntry {
meta: meta_slp(TOKEN_ID4, MintVault),
is_invalid: true,
- actual_burn_amount: 1000,
+ actual_burn_atoms: 1000,
..empty_entry()
},
TokenTxEntry {
@@ -659,7 +659,7 @@
..empty_entry()
},
],
- outputs: vec![None, token_amount::<0>(1)],
+ outputs: vec![None, token_atoms::<0>(1)],
failed_parsings: vec![],
}),
);
@@ -667,8 +667,8 @@
mem_tokens().tx_token_inputs(&txid(8)),
Some(
[
- spent_amount(meta_slp(TOKEN_ID5, Nft1Group), 1),
- spent_amount(meta_slp(TOKEN_ID4, MintVault), 1000),
+ spent_atoms(meta_slp(TOKEN_ID5, Nft1Group), 1),
+ spent_atoms(meta_slp(TOKEN_ID4, MintVault), 1000),
]
.as_ref()
),
@@ -716,7 +716,7 @@
tx_type: Some(TxType::MINT),
..empty_entry()
}],
- outputs: vec![None, token_amount::<0>(123)],
+ outputs: vec![None, token_atoms::<0>(123)],
failed_parsings: vec![],
}),
);
@@ -736,29 +736,29 @@
TokenTxEntry {
meta: meta_slp(TOKEN_ID3, Fungible),
tx_type: Some(TxType::SEND),
- actual_burn_amount: 234,
+ actual_burn_atoms: 234,
..empty_entry()
},
TokenTxEntry {
meta: meta_slp(TOKEN_ID4, MintVault),
is_invalid: true,
- actual_burn_amount: 123,
+ actual_burn_atoms: 123,
..empty_entry()
},
TokenTxEntry {
meta: meta_slp(TOKEN_ID8, Nft1Child),
group_token_meta: Some(meta_slp(TOKEN_ID5, Nft1Group)),
is_invalid: true,
- actual_burn_amount: 1,
+ actual_burn_atoms: 1,
..empty_entry()
},
],
outputs: vec![
None,
- token_amount::<0>(100),
- token_amount::<0>(200),
- token_amount::<0>(300),
- token_amount::<0>(400),
+ token_atoms::<0>(100),
+ token_atoms::<0>(200),
+ token_atoms::<0>(300),
+ token_atoms::<0>(400),
],
failed_parsings: vec![],
}),
@@ -767,13 +767,13 @@
mem_tokens().tx_token_inputs(&txid(11)),
Some(
[
- spent_amount(meta_slp(TOKEN_ID3, Fungible), 1234),
- spent_amount_group(
+ spent_atoms(meta_slp(TOKEN_ID3, Fungible), 1234),
+ spent_atoms_group(
meta_slp(TOKEN_ID8, Nft1Child),
1,
meta_slp(TOKEN_ID5, Nft1Group),
),
- spent_amount(meta_slp(TOKEN_ID4, MintVault), 123),
+ spent_atoms(meta_slp(TOKEN_ID4, MintVault), 123),
]
.as_ref()
),
@@ -788,7 +788,7 @@
meta: meta_slp(TOKEN_ID3, Fungible),
tx_type: None,
is_invalid: true,
- actual_burn_amount: 200,
+ actual_burn_atoms: 200,
..empty_entry()
}],
outputs: vec![None; 3],
@@ -797,7 +797,7 @@
);
assert_eq!(
mem_tokens().tx_token_inputs(&txid(12)),
- Some([spent_amount(meta_slp(TOKEN_ID3, Fungible), 200)].as_ref()),
+ Some([spent_atoms(meta_slp(TOKEN_ID3, Fungible), 200)].as_ref()),
);
// Test fetch_tx_spent_tokens
@@ -869,7 +869,7 @@
&db,
is_mempool_tx,
)?,
- Ok(vec![spent_amount_group(
+ Ok(vec![spent_atoms_group(
meta_slp(TOKEN_ID8, Nft1Child),
1,
meta_slp(TOKEN_ID5, Nft1Group),
diff --git a/chronik/chronik-db/src/plugins/io.rs b/chronik/chronik-db/src/plugins/io.rs
--- a/chronik/chronik-db/src/plugins/io.rs
+++ b/chronik/chronik-db/src/plugins/io.rs
@@ -482,7 +482,7 @@
use bitcoinsuite_core::{net::Net, script::Script, tx::Tx};
use bitcoinsuite_slp::{
test_helpers::{
- empty_entry, meta_alp, spent_amount, token_amount, TOKEN_ID1,
+ empty_entry, meta_alp, spent_atoms, token_atoms, TOKEN_ID1,
},
token_tx::{TokenTx, TokenTxEntry},
};
@@ -618,7 +618,7 @@
",
)?;
- // Plugin that sums up the input and output token amounts
+ // Plugin that sums up the input and output token atoms
File::create(tempdir.path().join("summer.py"))?.write_all(
b"
from chronik_plugin.plugin import Plugin, PluginOutput
@@ -628,8 +628,8 @@
def version(self):
return '0.0.0'
def run(self, tx):
- input_sum = sum(iput.output.token.amount for iput in tx.inputs)
- output_sum = sum(output.token.amount for output in tx.outputs[1:])
+ input_sum = sum(iput.output.token.atoms for iput in tx.inputs)
+ output_sum = sum(output.token.atoms for output in tx.outputs[1:])
return [PluginOutput(
idx=1,
data=[bytes([input_sum]), bytes([output_sum])],
@@ -852,8 +852,8 @@
}],
outputs: vec![
None,
- token_amount::<0>(50),
- token_amount::<0>(10),
+ token_atoms::<0>(50),
+ token_atoms::<0>(10),
],
..Default::default()
},
@@ -863,8 +863,8 @@
spent_tokens: vec![(
7,
vec![
- spent_amount(meta_alp(TOKEN_ID1), 20),
- spent_amount(meta_alp(TOKEN_ID1), 15),
+ spent_atoms(meta_alp(TOKEN_ID1), 20),
+ spent_atoms(meta_alp(TOKEN_ID1), 15),
],
)]
.into_iter()
diff --git a/chronik/chronik-db/src/test/value_group.rs b/chronik/chronik-db/src/test/value_group.rs
--- a/chronik/chronik-db/src/test/value_group.rs
+++ b/chronik/chronik-db/src/test/value_group.rs
@@ -11,7 +11,7 @@
io::{GroupHistoryConf, GroupUtxoConf},
};
-/// Index by output/input value. While useless in pactice, this makes
+/// Index by output/input sats. While useless in practice, this makes
/// writing tests very convenient and showcases how Group can be used.
#[derive(Debug, Default, Eq, PartialEq)]
pub(crate) struct ValueGroup;
@@ -34,7 +34,7 @@
if let Some(coin) = &input.coin {
inputs.push(MemberItem {
idx,
- member: coin.output.value,
+ member: coin.output.sats,
});
}
}
@@ -51,14 +51,14 @@
for (idx, output) in query.tx.outputs.iter().enumerate() {
outputs.push(MemberItem {
idx,
- member: output.value,
+ member: output.sats,
});
}
outputs
}
- fn ser_member(&self, value: &i64) -> Self::MemberSer {
- ser_value(*value)
+ fn ser_member(&self, sats: &i64) -> Self::MemberSer {
+ ser_value(*sats)
}
fn ser_hash_member(&self, _member: &Self::Member<'_>) -> [u8; 32] {
@@ -81,9 +81,9 @@
}
}
-/// Serialize the value as array
-pub(crate) fn ser_value(value: i64) -> [u8; 8] {
- value.to_be_bytes()
+/// Serialize the sats as array
+pub(crate) fn ser_value(sats: i64) -> [u8; 8] {
+ sats.to_be_bytes()
}
/// Make a tx with inputs and outputs having the given values.
@@ -96,7 +96,7 @@
) -> Tx {
make_inputs_tx(
txid_num,
- input_values.map(|value| (0, 0, value)),
+ input_values.map(|sats| (0, 0, sats)),
output_values,
)
}
@@ -112,14 +112,14 @@
version: 0,
inputs: input_values
.into_iter()
- .map(|(input_txid_num, out_idx, value)| TxInput {
+ .map(|(input_txid_num, out_idx, sats)| TxInput {
prev_out: OutPoint {
txid: TxId::from([input_txid_num; 32]),
out_idx,
},
coin: Some(Coin {
output: TxOutput {
- value,
+ sats,
..Default::default()
},
..Default::default()
@@ -129,8 +129,8 @@
.collect(),
outputs: output_values
.into_iter()
- .map(|value| TxOutput {
- value,
+ .map(|sats| TxOutput {
+ sats,
..Default::default()
})
.collect(),
diff --git a/chronik/chronik-http/src/ws.rs b/chronik/chronik-http/src/ws.rs
--- a/chronik/chronik-http/src/ws.rs
+++ b/chronik/chronik-http/src/ws.rs
@@ -345,7 +345,7 @@
.outputs
.iter()
.map(|output| proto::TxOutput {
- value: output.value,
+ sats: output.sats,
output_script: output.script.to_vec(),
..Default::default()
})
diff --git a/chronik/chronik-indexer/src/query/group_utxos.rs b/chronik/chronik-indexer/src/query/group_utxos.rs
--- a/chronik/chronik-indexer/src/query/group_utxos.rs
+++ b/chronik/chronik-indexer/src/query/group_utxos.rs
@@ -105,7 +105,7 @@
outpoint: Some(make_outpoint_proto(&extra.outpoint)),
block_height: extra.block_height,
is_coinbase: extra.is_coinbase,
- value: data,
+ sats: data,
is_final: extra.is_final,
token: extra
.token
@@ -129,7 +129,7 @@
outpoint: Some(make_outpoint_proto(&extra.outpoint)),
block_height: extra.block_height,
is_coinbase: extra.is_coinbase,
- value: data.0,
+ sats: data.0,
script: data.1.to_vec(),
is_final: extra.is_final,
token: extra
diff --git a/chronik/chronik-indexer/src/query/tx_token_data.rs b/chronik/chronik-indexer/src/query/tx_token_data.rs
--- a/chronik/chronik-indexer/src/query/tx_token_data.rs
+++ b/chronik/chronik-indexer/src/query/tx_token_data.rs
@@ -208,7 +208,7 @@
token_id: token.meta.token_id.to_string(),
token_type: Some(make_token_type_proto(token.meta.token_type)),
entry_idx,
- amount: token.variant.amount(),
+ atoms: token.variant.atoms(),
is_mint_baton: token.variant.is_mint_baton(),
})
}
@@ -225,7 +225,7 @@
token_id: token.meta.token_id.to_string(),
token_type: Some(make_token_type_proto(token.meta.token_type)),
entry_idx: token_output.token_idx as _,
- amount: token.variant.amount() as _,
+ atoms: token.variant.atoms() as _,
is_mint_baton: token.variant.is_mint_baton(),
})
}
@@ -264,9 +264,9 @@
error: failed_coloring.error.to_string(),
})
.collect(),
- actual_burn_amount: entry.actual_burn_amount.to_string(),
- intentional_burn: entry
- .intentional_burn_amount
+ actual_burn_atoms: entry.actual_burn_atoms.to_string(),
+ intentional_burn_atoms: entry
+ .intentional_burn_atoms
.unwrap_or_default(),
burns_mint_batons: entry.burns_mint_batons,
})
@@ -353,7 +353,7 @@
token_id: token.meta.token_id.to_string(),
token_type: Some(make_token_type_proto(token.meta.token_type)),
entry_idx: -1,
- amount: token.variant.amount(),
+ atoms: token.variant.atoms(),
is_mint_baton: token.variant.is_mint_baton(),
}
}
diff --git a/chronik/chronik-indexer/src/query/util.rs b/chronik/chronik-indexer/src/query/util.rs
--- a/chronik/chronik-indexer/src/query/util.rs
+++ b/chronik/chronik-indexer/src/query/util.rs
@@ -69,16 +69,14 @@
.enumerate()
.map(|(input_idx, input)| {
let coin = input.coin.as_ref();
- let (output_script, value) = coin
- .map(|coin| {
- (coin.output.script.to_vec(), coin.output.value)
- })
+ let (output_script, sats) = coin
+ .map(|coin| (coin.output.script.to_vec(), coin.output.sats))
.unwrap_or_default();
proto::TxInput {
prev_out: Some(make_outpoint_proto(&input.prev_out)),
input_script: input.script.to_vec(),
output_script,
- value,
+ sats,
sequence_no: input.sequence,
token: params
.token
@@ -101,7 +99,7 @@
.iter()
.enumerate()
.map(|(output_idx, output)| proto::TxOutput {
- value: output.value,
+ sats: output.sats,
output_script: output.script.to_vec(),
spent_by: params
.outputs_spent
diff --git a/chronik/chronik-plugin-impl/src/context.rs b/chronik/chronik-plugin-impl/src/context.rs
--- a/chronik/chronik-plugin-impl/src/context.rs
+++ b/chronik/chronik-plugin-impl/src/context.rs
@@ -585,7 +585,7 @@
TXID,
TxMut {
version,
- outputs: vec![TxOutput { value: 0, script }]
+ outputs: vec![TxOutput { sats: 0, script }]
.into_iter()
.chain(vec![TxOutput::default(); num_outputs])
.collect(),
diff --git a/chronik/chronik-plugin-impl/src/etoken.py b/chronik/chronik-plugin-impl/src/etoken.py
--- a/chronik/chronik-plugin-impl/src/etoken.py
+++ b/chronik/chronik-plugin-impl/src/etoken.py
@@ -32,7 +32,7 @@
auth_pubkey: Optional[bytes]
# How many decimal places to use when displaying the token.
- # Token amounts are stored in their "base" form, but should be displayed
+ # Token amounts are stored in their "base" form (atoms), but should be displayed
# as `base_amount * 10^-decimals`. E.g. a base amount of 12345 and
# decimals of 4 should be displayed as "1.2345".
decimals: int
@@ -60,10 +60,10 @@
is_invalid: bool = False
# Number of actually burned tokens
- actual_burn_amount: int = 0
+ actual_burn_atoms: int = 0
# Number of burned tokens the user explicitly opted into
- intentional_burn_amount: Optional[int] = None
+ intentional_burn_atoms: Optional[int] = None
# Whether any mint batons of this token are burned in this tx
burns_mint_batons: bool = False
@@ -86,8 +86,8 @@
# Index into `token_entries` of a `Tx` object
entry_idx: int
- # Base token amount of the input/output
- amount: int
+ # Base token amount (aka "atoms") of the input/output
+ atoms: int
# Whether the token is a mint baton
is_mint_baton: bool
diff --git a/chronik/chronik-plugin-impl/src/etoken.rs b/chronik/chronik-plugin-impl/src/etoken.rs
--- a/chronik/chronik-plugin-impl/src/etoken.rs
+++ b/chronik/chronik-plugin-impl/src/etoken.rs
@@ -100,11 +100,9 @@
entry.group_token_meta.map(|meta| meta.token_id.to_string()),
)?;
kwargs.set_item("is_invalid", entry.is_invalid)?;
- kwargs.set_item("actual_burn_amount", entry.actual_burn_amount)?;
- kwargs.set_item(
- "intentional_burn_amount",
- entry.intentional_burn_amount,
- )?;
+ kwargs.set_item("actual_burn_atoms", entry.actual_burn_atoms)?;
+ kwargs
+ .set_item("intentional_burn_atoms", entry.intentional_burn_atoms)?;
kwargs.set_item("burns_mint_batons", entry.burns_mint_batons)?;
kwargs.set_item(
"genesis_info",
@@ -133,7 +131,7 @@
.set_item("token_protocol", entry.getattr(py, "token_protocol")?)?;
kwargs.set_item("token_type", entry.getattr(py, "token_type")?)?;
kwargs.set_item("entry_idx", entry_idx)?;
- kwargs.set_item("amount", token_variant.amount())?;
+ kwargs.set_item("atoms", token_variant.atoms())?;
kwargs.set_item("is_mint_baton", token_variant.is_mint_baton())?;
self.cls_token.call(py, (), Some(&kwargs))
diff --git a/chronik/chronik-plugin-impl/src/tx.py b/chronik/chronik-plugin-impl/src/tx.py
--- a/chronik/chronik-plugin-impl/src/tx.py
+++ b/chronik/chronik-plugin-impl/src/tx.py
@@ -30,7 +30,7 @@
script: CScript
# value of the output, in satoshis
- value: int
+ sats: int
# ALP/SLP value attached to the output
token: Optional[Token]
diff --git a/chronik/chronik-plugin-impl/src/tx.rs b/chronik/chronik-plugin-impl/src/tx.rs
--- a/chronik/chronik-plugin-impl/src/tx.rs
+++ b/chronik/chronik-plugin-impl/src/tx.rs
@@ -151,7 +151,7 @@
) -> PyResult<PyObject> {
let kwargs = PyDict::new(py);
kwargs.set_item("script", self.bridge_script(py, &output.script)?)?;
- kwargs.set_item("value", output.value)?;
+ kwargs.set_item("sats", output.sats)?;
kwargs.set_item(
"token",
token_output
@@ -220,7 +220,7 @@
) -> PyResult<PyObject> {
let kwargs = PyDict::new(py);
kwargs.set_item("script", self.bridge_script(py, &output.script)?)?;
- kwargs.set_item("value", output.value)?;
+ kwargs.set_item("sats", output.sats)?;
kwargs.set_item(
"token",
spent_token
diff --git a/chronik/chronik-plugin-impl/tests/test_tx_to_py.py b/chronik/chronik-plugin-impl/tests/test_tx_to_py.py
--- a/chronik/chronik-plugin-impl/tests/test_tx_to_py.py
+++ b/chronik/chronik-plugin-impl/tests/test_tx_to_py.py
@@ -8,13 +8,13 @@
from test_framework.util import assert_equal
-def slp_amount(token_id: str, token_type: int, amount: int, entry_idx=0) -> Token:
+def slp_atoms(token_id: str, token_type: int, atoms: int, entry_idx=0) -> Token:
return Token(
token_id=token_id,
token_protocol="SLP",
token_type=token_type,
entry_idx=entry_idx,
- amount=amount,
+ atoms=atoms,
is_mint_baton=False,
)
@@ -25,18 +25,18 @@
token_protocol="SLP",
token_type=token_type,
entry_idx=entry_idx,
- amount=0,
+ atoms=0,
is_mint_baton=True,
)
-def alp_amount(token_id: str, token_type: int, amount: int, entry_idx=0) -> Token:
+def alp_atoms(token_id: str, token_type: int, atoms: int, entry_idx=0) -> Token:
return Token(
token_id=token_id,
token_protocol="ALP",
token_type=token_type,
entry_idx=entry_idx,
- amount=amount,
+ atoms=atoms,
is_mint_baton=False,
)
@@ -47,7 +47,7 @@
token_protocol="ALP",
token_type=token_type,
entry_idx=entry_idx,
- amount=0,
+ atoms=0,
is_mint_baton=True,
)
@@ -65,7 +65,7 @@
script=CScript(
bytes.fromhex("a914020202020202020202020202020202020202020287")
),
- value=50000,
+ sats=50000,
token=None,
),
sequence=0x12345678,
@@ -87,7 +87,7 @@
script=CScript(
bytes.fromhex("76a914060606060606060606060606060606060606060688ac")
),
- value=40000,
+ sats=40000,
token=None,
),
],
@@ -104,7 +104,7 @@
[output.token for output in tx.outputs],
[
None,
- slp_amount("02" * 32, 1, 1234),
+ slp_atoms("02" * 32, 1, 1234),
slp_baton("02" * 32, 1),
],
)
@@ -139,7 +139,7 @@
[output.token for output in tx.outputs],
[
None,
- slp_amount("02" * 32, 2, 1234),
+ slp_atoms("02" * 32, 2, 1234),
],
)
assert_equal(
@@ -171,14 +171,14 @@
assert_equal(
[inpt.output.token for inpt in tx.inputs],
[
- slp_amount("03" * 32, 0x81, 1, entry_idx=1),
+ slp_atoms("03" * 32, 0x81, 1, entry_idx=1),
],
)
assert_equal(
[output.token for output in tx.outputs],
[
None,
- slp_amount("02" * 32, 0x41, 1),
+ slp_atoms("02" * 32, 0x41, 1),
],
)
assert_equal(
@@ -222,7 +222,7 @@
[output.token for output in tx.outputs],
[
None,
- slp_amount("03" * 32, 1, 1234),
+ slp_atoms("03" * 32, 1, 1234),
slp_baton("03" * 32, 1),
],
)
@@ -244,16 +244,16 @@
assert_equal(
[inpt.output.token for inpt in tx.inputs],
[
- slp_amount("03" * 32, 1, 20),
+ slp_atoms("03" * 32, 1, 20),
],
)
assert_equal(
[output.token for output in tx.outputs],
[
None,
- slp_amount("03" * 32, 1, 5),
- slp_amount("03" * 32, 1, 6),
- slp_amount("03" * 32, 1, 7),
+ slp_atoms("03" * 32, 1, 5),
+ slp_atoms("03" * 32, 1, 6),
+ slp_atoms("03" * 32, 1, 7),
],
)
assert_equal(
@@ -264,7 +264,7 @@
token_protocol="SLP",
token_type=1,
tx_type="SEND",
- actual_burn_amount=2,
+ actual_burn_atoms=2,
),
],
)
@@ -275,7 +275,7 @@
assert_equal(
[inpt.output.token for inpt in tx.inputs],
[
- slp_amount("03" * 32, 1, 600),
+ slp_atoms("03" * 32, 1, 600),
],
)
assert_equal(
@@ -292,8 +292,8 @@
token_protocol="SLP",
token_type=1,
tx_type="BURN",
- actual_burn_amount=600,
- intentional_burn_amount=500,
+ actual_burn_atoms=600,
+ intentional_burn_atoms=500,
),
],
)
@@ -337,8 +337,8 @@
token_protocol="ALP",
token_type=0,
tx_type="SEND",
- actual_burn_amount=500,
- intentional_burn_amount=1000,
+ actual_burn_atoms=500,
+ intentional_burn_atoms=1000,
),
)
assert_equal(
@@ -385,7 +385,7 @@
token_protocol="SLP",
token_type=1,
is_invalid=True,
- actual_burn_amount=30,
+ actual_burn_atoms=30,
),
)
assert_equal(
@@ -395,7 +395,7 @@
token_protocol="SLP",
token_type=2,
is_invalid=True,
- actual_burn_amount=20,
+ actual_burn_atoms=20,
),
)
assert_equal(
@@ -405,7 +405,7 @@
token_protocol="SLP",
token_type=0x81,
is_invalid=True,
- actual_burn_amount=20,
+ actual_burn_atoms=20,
),
)
assert_equal(
@@ -416,7 +416,7 @@
token_type=0x41,
group_token_id="0606060606060606060606060606060606060606060606060606060606060606",
is_invalid=True,
- actual_burn_amount=1,
+ actual_burn_atoms=1,
),
)
assert_equal(
@@ -424,30 +424,30 @@
[
alp_baton("02" * 32, 0, entry_idx=1),
None,
- alp_amount("03" * 32, 0, 2000, entry_idx=2),
- alp_amount("03" * 32, 0, 5000, entry_idx=2),
- slp_amount("04" * 32, 1, 30, entry_idx=7),
- slp_amount("05" * 32, 2, 20, entry_idx=8),
- slp_amount("06" * 32, 0x81, 20, entry_idx=9),
- slp_amount("07" * 32, 0x41, 1, entry_idx=10),
- alp_amount("00" * 32, 3, 0, entry_idx=6),
- slp_amount("00" * 32, 3, 0, entry_idx=5),
+ alp_atoms("03" * 32, 0, 2000, entry_idx=2),
+ alp_atoms("03" * 32, 0, 5000, entry_idx=2),
+ slp_atoms("04" * 32, 1, 30, entry_idx=7),
+ slp_atoms("05" * 32, 2, 20, entry_idx=8),
+ slp_atoms("06" * 32, 0x81, 20, entry_idx=9),
+ slp_atoms("07" * 32, 0x41, 1, entry_idx=10),
+ alp_atoms("00" * 32, 3, 0, entry_idx=6),
+ slp_atoms("00" * 32, 3, 0, entry_idx=5),
],
)
assert_equal(
[output.token for output in tx.outputs],
[
None,
- alp_amount("02" * 32, 0, 1000, entry_idx=1),
- alp_amount("03" * 32, 0, 500, entry_idx=2),
- alp_amount("01" * 32, 0, 10, entry_idx=0),
+ alp_atoms("02" * 32, 0, 1000, entry_idx=1),
+ alp_atoms("03" * 32, 0, 500, entry_idx=2),
+ alp_atoms("01" * 32, 0, 10, entry_idx=0),
alp_baton("02" * 32, 0, entry_idx=1),
None,
alp_baton("01" * 32, 0, entry_idx=0),
alp_baton("01" * 32, 0, entry_idx=0),
- alp_amount("00" * 32, 2, 0, entry_idx=4),
- alp_amount("03" * 32, 0, 6000, entry_idx=2),
- alp_amount("00" * 32, 2, 0, entry_idx=4),
+ alp_atoms("00" * 32, 2, 0, entry_idx=4),
+ alp_atoms("03" * 32, 0, 6000, entry_idx=2),
+ alp_atoms("00" * 32, 2, 0, entry_idx=4),
],
)
@@ -461,13 +461,13 @@
token_protocol="ALP",
token_type=0,
is_invalid=True,
- actual_burn_amount=200,
+ actual_burn_atoms=200,
)
],
)
assert_equal(
[inpt.output.token for inpt in tx.inputs],
- [alp_amount("02" * 32, 0, 200)],
+ [alp_atoms("02" * 32, 0, 200)],
)
assert_equal(
[output.token for output in tx.outputs],
diff --git a/chronik/chronik-plugin-impl/tests/test_tx_to_py.rs b/chronik/chronik-plugin-impl/tests/test_tx_to_py.rs
--- a/chronik/chronik-plugin-impl/tests/test_tx_to_py.rs
+++ b/chronik/chronik-plugin-impl/tests/test_tx_to_py.rs
@@ -20,7 +20,7 @@
slp::{burn_opreturn, genesis_opreturn, mint_opreturn, send_opreturn},
structs::{GenesisInfo, Token, TokenVariant},
test_helpers::{
- meta_alp, meta_alp_unknown, meta_slp, spent_amount, spent_amount_group,
+ meta_alp, meta_alp_unknown, meta_slp, spent_atoms, spent_atoms_group,
spent_baton, EMPTY_TOKEN_ID, TOKEN_ID2, TOKEN_ID3, TOKEN_ID4,
TOKEN_ID5, TOKEN_ID6, TOKEN_ID7, TOKEN_ID8,
},
@@ -72,7 +72,7 @@
})
.collect(),
outputs: [TxOutput {
- value: 0,
+ sats: 0,
script: params.op_return_script.clone(),
}]
.into_iter()
@@ -131,7 +131,7 @@
coin: Some(Coin {
output: TxOutput {
script: Script::p2sh(&ShaRmd160([2; 20])),
- value: 50000,
+ sats: 50000,
},
height: 0,
is_coinbase: false,
@@ -146,7 +146,7 @@
},
],
outputs: vec![TxOutput {
- value: 40000,
+ sats: 40000,
script: Script::p2pkh(&ShaRmd160([6; 20])),
}],
locktime: 0x87654321,
@@ -253,7 +253,7 @@
None,
1,
),
- spent_tokens: &[spent_amount(
+ spent_tokens: &[spent_atoms(
meta_slp(TOKEN_ID3, SlpTokenType::Nft1Group),
1,
)],
@@ -300,7 +300,7 @@
SlpTokenType::Fungible,
&[5, 6, 7],
),
- spent_tokens: &[spent_amount(
+ spent_tokens: &[spent_atoms(
meta_slp(TOKEN_ID3, SlpTokenType::Fungible),
20,
)],
@@ -323,7 +323,7 @@
SlpTokenType::Fungible,
500,
),
- spent_tokens: &[spent_amount(
+ spent_tokens: &[spent_atoms(
meta_slp(TOKEN_ID3, SlpTokenType::Fungible),
600,
)],
@@ -383,21 +383,21 @@
spent_tokens: &[
spent_baton(meta_alp(TOKEN_ID2)),
None,
- spent_amount(meta_alp(TOKEN_ID3), 2000),
- spent_amount(meta_alp(TOKEN_ID3), 5000),
- spent_amount(
+ spent_atoms(meta_alp(TOKEN_ID3), 2000),
+ spent_atoms(meta_alp(TOKEN_ID3), 5000),
+ spent_atoms(
meta_slp(TOKEN_ID4, SlpTokenType::Fungible),
30,
),
- spent_amount(
+ spent_atoms(
meta_slp(TOKEN_ID5, SlpTokenType::MintVault),
20,
),
- spent_amount(
+ spent_atoms(
meta_slp(TOKEN_ID6, SlpTokenType::Nft1Group),
20,
),
- spent_amount_group(
+ spent_atoms_group(
meta_slp(TOKEN_ID7, SlpTokenType::Nft1Child),
1,
meta_slp(TOKEN_ID6, SlpTokenType::Nft1Group),
@@ -435,7 +435,7 @@
txid_num: 1,
num_outputs: 1,
op_return_script: Script::default(),
- spent_tokens: &[spent_amount(meta_alp(TOKEN_ID2), 200)],
+ spent_tokens: &[spent_atoms(meta_alp(TOKEN_ID2), 200)],
..Default::default()
},
)?;
diff --git a/chronik/chronik-proto/proto/chronik.proto b/chronik/chronik-proto/proto/chronik.proto
--- a/chronik/chronik-proto/proto/chronik.proto
+++ b/chronik/chronik-proto/proto/chronik.proto
@@ -120,7 +120,7 @@
// Whether the UTXO has been created in a coinbase tx.
bool is_coinbase = 3;
// Value of the output, in satoshis.
- int64 value = 5;
+ int64 sats = 5;
// Whether the UTXO has been finalized by Avalanche.
bool is_final = 10;
// Token value attached to this UTXO
@@ -138,7 +138,7 @@
// Whether the UTXO has been created in a coinbase tx.
bool is_coinbase = 3;
// Value of the output, in satoshis.
- int64 value = 4;
+ int64 sats = 4;
// Bytecode of the script of the output
bytes script = 5;
// Whether the UTXO has been finalized by Avalanche.
@@ -174,7 +174,7 @@
// scriptPubKey, script of the output locking the coin.
bytes output_script = 3;
// value of the output being spent, in satoshis.
- int64 value = 4;
+ int64 sats = 4;
// nSequence of the input.
uint32 sequence_no = 5;
// Token value attached to this input
@@ -186,7 +186,7 @@
// CTxOut, creates a new coin.
message TxOutput {
// Value of the coin, in satoshis.
- int64 value = 1;
+ int64 sats = 1;
// scriptPubKey, script locking the output.
bytes output_script = 2;
// Which tx and input spent this output, if any.
@@ -300,11 +300,11 @@
string burn_summary = 6;
// Human-readable error messages of why colorings failed
repeated TokenFailedColoring failed_colorings = 7;
- // Number of actually burned tokens (as decimal integer string, e.g. "2000").
+ // Number of actually burned tokens (as decimal integer string, e.g. "2000"; in atoms aka base tokens).
// This is because burns can exceed the 64-bit range of values and protobuf doesn't have a nice type to encode this.
- string actual_burn_amount = 8;
- // Burn amount the user explicitly opted into
- uint64 intentional_burn = 9;
+ string actual_burn_atoms = 8;
+ // Burn amount (in atoms aka base tokens) the user explicitly opted into
+ uint64 intentional_burn_atoms = 9;
// Whether any mint batons have been burned of this token
bool burns_mint_batons = 10;
}
@@ -338,7 +338,7 @@
// Index into `token_entries` for `Tx`. -1 for UTXOs
int32 entry_idx = 3;
// Base token amount of the input/output
- uint64 amount = 4;
+ uint64 atoms = 4;
// Whether the token is a mint baton
bool is_mint_baton = 5;
}
diff --git a/chronik/test/bridgeprimitives_tests.cpp b/chronik/test/bridgeprimitives_tests.cpp
--- a/chronik/test/bridgeprimitives_tests.cpp
+++ b/chronik/test/bridgeprimitives_tests.cpp
@@ -40,7 +40,7 @@
BOOST_CHECK_EQUAL(inLeft.prev_out.out_idx, inRight.prev_out.out_idx);
BOOST_CHECK_EQUAL(HexStr(inLeft.script), HexStr(inRight.script));
BOOST_CHECK_EQUAL(inLeft.sequence, inRight.sequence);
- BOOST_CHECK_EQUAL(inLeft.coin.output.value, inRight.coin.output.value);
+ BOOST_CHECK_EQUAL(inLeft.coin.output.sats, inRight.coin.output.sats);
BOOST_CHECK_EQUAL(HexStr(inLeft.coin.output.script),
HexStr(inRight.coin.output.script));
BOOST_CHECK_EQUAL(inLeft.coin.height, inRight.coin.height);
@@ -50,7 +50,7 @@
for (size_t outputIdx = 0; outputIdx < left.outputs.size(); ++outputIdx) {
const chronik_bridge::TxOutput &outLeft = left.outputs[outputIdx];
const chronik_bridge::TxOutput &outRight = right.outputs.at(outputIdx);
- BOOST_CHECK_EQUAL(outLeft.value, outRight.value);
+ BOOST_CHECK_EQUAL(outLeft.sats, outRight.sats);
BOOST_CHECK_EQUAL(HexStr(outLeft.script), HexStr(outRight.script));
}
}
@@ -100,7 +100,7 @@
const chronik_bridge::Coin &bridgeCoin =
blockTx.tx.inputs[inputIdx].coin;
BOOST_CHECK_EQUAL(coin.GetTxOut().nValue / SATOSHI,
- bridgeCoin.output.value);
+ bridgeCoin.output.sats);
BOOST_CHECK_EQUAL(HexStr(coin.GetTxOut().scriptPubKey),
HexStr(bridgeCoin.output.script));
BOOST_CHECK_EQUAL(coin.GetHeight(), bridgeCoin.height);
@@ -138,7 +138,7 @@
.coin = {}, // null coin
}},
.outputs = {{
- .value = 5000000000,
+ .sats = 5000000000,
.script =
ToRustVec<uint8_t>(genesisBlock.vtx[0]->vout[0].scriptPubKey),
}},
@@ -235,7 +235,7 @@
.coin = {}, // null coin
}},
.outputs = {{
- .value = 2500000000,
+ .sats = 2500000000,
.script = {0x52},
}},
.locktime = 0,
@@ -261,47 +261,52 @@
{0, ToRustVec<uint8_t>(scriptPad)}},
.locktime = 123,
};
- chronik_bridge::Tx expectedTestTx2 = {
- .txid = HashToArray(tx2.GetId()),
- .version = 1,
- .inputs = {chronik_bridge::TxInput({
- .prev_out = chronik_bridge::OutPoint({
- .txid = HashToArray(tx1.GetId()),
- .out_idx = 0,
- }),
- .script = {},
- .sequence = 0xffff'ffff,
- .coin =
- {
- .output = {4999990000, {0x53}},
- .height = 202,
- .is_coinbase = false,
- },
- }),
- chronik_bridge::TxInput({
- .prev_out = chronik_bridge::OutPoint({
- .txid = HashToArray(tx1.GetId()),
- .out_idx = 1,
- }),
- .script = {},
- .sequence = 0xffff'ffff,
- .coin =
- {
- .output = {1000, {0x54}},
- .height = 202,
- .is_coinbase = false,
- },
- })},
- .outputs = {{
- .value = 4999970000,
- .script = {0x55},
- },
- {
- .value = 0,
- .script = ToRustVec<uint8_t>(scriptPad),
- }},
- .locktime = 0,
- };
+ chronik_bridge::Tx
+ expectedTestTx2 =
+ {
+ .txid = HashToArray(tx2.GetId()),
+ .version = 1,
+ .inputs = {chronik_bridge::TxInput(
+ {
+ .prev_out = chronik_bridge::OutPoint({
+ .txid = HashToArray(tx1.GetId()),
+ .out_idx = 0,
+ }),
+ .script = {},
+ .sequence = 0xffff'ffff,
+ .coin =
+ {
+ .output = {4999990000, {0x53}},
+ .height = 202,
+ .is_coinbase = false,
+ },
+ }),
+ chronik_bridge::TxInput(
+ {
+ .prev_out = chronik_bridge::OutPoint(
+ {
+ .txid = HashToArray(tx1.GetId()),
+ .out_idx = 1,
+ }),
+ .script = {},
+ .sequence = 0xffff'ffff,
+ .coin =
+ {
+ .output = {1000, {0x54}},
+ .height = 202,
+ .is_coinbase = false,
+ },
+ })},
+ .outputs = {{
+ .sats = 4999970000,
+ .script = {0x55},
+ },
+ {
+ .sats = 0,
+ .script = ToRustVec<uint8_t>(scriptPad),
+ }},
+ .locktime = 0,
+ };
chronik_bridge::Block expectedBridgedTestBlock = {
.hash = HashToArray(testBlock.GetHash()),
.prev_hash = HashToArray(testBlock.hashPrevBlock),
diff --git a/chronik/test/chronikbridge_tests.cpp b/chronik/test/chronikbridge_tests.cpp
--- a/chronik/test/chronikbridge_tests.cpp
+++ b/chronik/test/chronikbridge_tests.cpp
@@ -154,10 +154,10 @@
// lookup_spent_coins mutates our query_tx to set the queried coins
const rust::Vec<uint8_t> &script0 = query_tx.inputs[0].coin.output.script;
const rust::Vec<uint8_t> &script1 = query_tx.inputs[1].coin.output.script;
- BOOST_CHECK_EQUAL(query_tx.inputs[0].coin.output.value, 1000);
+ BOOST_CHECK_EQUAL(query_tx.inputs[0].coin.output.sats, 1000);
BOOST_CHECK(CScript(script0.data(), script0.data() + script0.size()) ==
anyoneP2sh);
- BOOST_CHECK_EQUAL(query_tx.inputs[1].coin.output.value,
+ BOOST_CHECK_EQUAL(query_tx.inputs[1].coin.output.sats,
coinTx->vout[0].nValue / SATOSHI - 10000);
BOOST_CHECK(CScript(script1.data(), script1.data() + script1.size()) ==
anyoneP2sh);
diff --git a/modules/bitcoinsuite-chronik-client/tests/test_chronik_client.rs b/modules/bitcoinsuite-chronik-client/tests/test_chronik_client.rs
--- a/modules/bitcoinsuite-chronik-client/tests/test_chronik_client.rs
+++ b/modules/bitcoinsuite-chronik-client/tests/test_chronik_client.rs
@@ -198,14 +198,14 @@
a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f999b864\
3f656b412a3ac",
)?,
- value: 5_000_000_000,
+ sats: 5_000_000_000,
sequence_no: 0xffffffff,
token: None,
plugins: HashMap::new(),
}],
outputs: vec![
proto::TxOutput {
- value: 1_000_000_000,
+ sats: 1_000_000_000,
output_script: hex::decode(
"4104ae1a62fe09c5f51b13905f07f06b99a2f7159b2225f374cd378d7\
1302fa28414e7aab37397f554a7df5f142c21c1b7303b8a0626f1bade\
@@ -224,7 +224,7 @@
plugins: HashMap::new(),
},
proto::TxOutput {
- value: 4_000_000_000,
+ sats: 4_000_000_000,
output_script: hex::decode(
"410411db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b\
148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f9d4c03f\
diff --git a/modules/chronik-client/README.md b/modules/chronik-client/README.md
--- a/modules/chronik-client/README.md
+++ b/modules/chronik-client/README.md
@@ -117,3 +117,4 @@
- 2.0.0 - **(Breaking change)** Change `auth` in `GenesisInfo` to hex string instead of `Uint8Array`, maintaining consistency with other API behavior [D17194](https://reviews.bitcoinabc.org/D17194)
- 2.1.0 - Add support for `validateRawTx` endpoint [D15631](https://reviews.bitcoinabc.org/D15631)
- 2.1.1 - Upgrade to dependency-free `ecashaddrjs` [D17269](https://reviews.bitcoinabc.org/D17269)
+- 3.0.0 - Proto update; `atoms` instead of `amount` and `sats` instead of `value` [D17650](https://reviews.bitcoinabc.org/D17650)
diff --git a/modules/chronik-client/package-lock.json b/modules/chronik-client/package-lock.json
--- a/modules/chronik-client/package-lock.json
+++ b/modules/chronik-client/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "chronik-client",
- "version": "2.1.1",
+ "version": "3.0.0",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "chronik-client",
- "version": "2.1.1",
+ "version": "3.0.0",
"license": "MIT",
"dependencies": {
"@types/ws": "^8.2.1",
diff --git a/modules/chronik-client/package.json b/modules/chronik-client/package.json
--- a/modules/chronik-client/package.json
+++ b/modules/chronik-client/package.json
@@ -1,6 +1,6 @@
{
"name": "chronik-client",
- "version": "2.1.1",
+ "version": "3.0.0",
"description": "A client for accessing the Chronik Indexer API",
"main": "dist/index.js",
"types": "dist/index.d.ts",
diff --git a/modules/chronik-client/proto/chronik.ts b/modules/chronik-client/proto/chronik.ts
--- a/modules/chronik-client/proto/chronik.ts
+++ b/modules/chronik-client/proto/chronik.ts
@@ -416,7 +416,7 @@
/** Whether the UTXO has been created in a coinbase tx. */
isCoinbase: boolean;
/** Value of the output, in satoshis. */
- value: string;
+ sats: string;
/** Whether the UTXO has been finalized by Avalanche. */
isFinal: boolean;
/** Token value attached to this UTXO */
@@ -439,7 +439,7 @@
/** Whether the UTXO has been created in a coinbase tx. */
isCoinbase: boolean;
/** Value of the output, in satoshis. */
- value: string;
+ sats: string;
/** Bytecode of the script of the output */
script: Uint8Array;
/** Whether the UTXO has been finalized by Avalanche. */
@@ -480,7 +480,7 @@
/** scriptPubKey, script of the output locking the coin. */
outputScript: Uint8Array;
/** value of the output being spent, in satoshis. */
- value: string;
+ sats: string;
/** nSequence of the input. */
sequenceNo: number;
/** Token value attached to this input */
@@ -497,7 +497,7 @@
/** CTxOut, creates a new coin. */
export interface TxOutput {
/** Value of the coin, in satoshis. */
- value: string;
+ sats: string;
/** scriptPubKey, script locking the output. */
outputScript: Uint8Array;
/** Which tx and input spent this output, if any. */
@@ -571,12 +571,12 @@
/** Human-readable error messages of why colorings failed */
failedColorings: TokenFailedColoring[];
/**
- * Number of actually burned tokens (as decimal integer string, e.g. "2000").
+ * Number of actually burned tokens (as decimal integer string, e.g. "2000"; in atoms aka base tokens).
* This is because burns can exceed the 64-bit range of values and protobuf doesn't have a nice type to encode this.
*/
- actualBurnAmount: string;
- /** Burn amount the user explicitly opted into */
- intentionalBurn: string;
+ actualBurnAtoms: string;
+ /** Burn amount (in atoms aka base tokens) the user explicitly opted into */
+ intentionalBurnAtoms: string;
/** Whether any mint batons have been burned of this token */
burnsMintBatons: boolean;
}
@@ -610,7 +610,7 @@
/** Index into `token_entries` for `Tx`. -1 for UTXOs */
entryIdx: number;
/** Base token amount of the input/output */
- amount: string;
+ atoms: string;
/** Whether the token is a mint baton */
isMintBaton: boolean;
}
@@ -1940,7 +1940,7 @@
outpoint: undefined,
blockHeight: 0,
isCoinbase: false,
- value: '0',
+ sats: '0',
isFinal: false,
token: undefined,
plugins: {},
@@ -1964,8 +1964,8 @@
if (message.isCoinbase === true) {
writer.uint32(24).bool(message.isCoinbase);
}
- if (message.value !== '0') {
- writer.uint32(40).int64(message.value);
+ if (message.sats !== '0') {
+ writer.uint32(40).int64(message.sats);
}
if (message.isFinal === true) {
writer.uint32(80).bool(message.isFinal);
@@ -2016,7 +2016,7 @@
break;
}
- message.value = longToString(reader.int64() as Long);
+ message.sats = longToString(reader.int64() as Long);
continue;
case 10:
if (tag !== 80) {
@@ -2065,7 +2065,7 @@
isCoinbase: isSet(object.isCoinbase)
? globalThis.Boolean(object.isCoinbase)
: false,
- value: isSet(object.value) ? globalThis.String(object.value) : '0',
+ sats: isSet(object.sats) ? globalThis.String(object.sats) : '0',
isFinal: isSet(object.isFinal)
? globalThis.Boolean(object.isFinal)
: false,
@@ -2094,8 +2094,8 @@
if (message.isCoinbase === true) {
obj.isCoinbase = message.isCoinbase;
}
- if (message.value !== '0') {
- obj.value = message.value;
+ if (message.sats !== '0') {
+ obj.sats = message.sats;
}
if (message.isFinal === true) {
obj.isFinal = message.isFinal;
@@ -2128,7 +2128,7 @@
: undefined;
message.blockHeight = object.blockHeight ?? 0;
message.isCoinbase = object.isCoinbase ?? false;
- message.value = object.value ?? '0';
+ message.sats = object.sats ?? '0';
message.isFinal = object.isFinal ?? false;
message.token =
object.token !== undefined && object.token !== null
@@ -2244,7 +2244,7 @@
outpoint: undefined,
blockHeight: 0,
isCoinbase: false,
- value: '0',
+ sats: '0',
script: new Uint8Array(0),
isFinal: false,
token: undefined,
@@ -2269,8 +2269,8 @@
if (message.isCoinbase === true) {
writer.uint32(24).bool(message.isCoinbase);
}
- if (message.value !== '0') {
- writer.uint32(32).int64(message.value);
+ if (message.sats !== '0') {
+ writer.uint32(32).int64(message.sats);
}
if (message.script.length !== 0) {
writer.uint32(42).bytes(message.script);
@@ -2324,7 +2324,7 @@
break;
}
- message.value = longToString(reader.int64() as Long);
+ message.sats = longToString(reader.int64() as Long);
continue;
case 5:
if (tag !== 42) {
@@ -2380,7 +2380,7 @@
isCoinbase: isSet(object.isCoinbase)
? globalThis.Boolean(object.isCoinbase)
: false,
- value: isSet(object.value) ? globalThis.String(object.value) : '0',
+ sats: isSet(object.sats) ? globalThis.String(object.sats) : '0',
script: isSet(object.script)
? bytesFromBase64(object.script)
: new Uint8Array(0),
@@ -2412,8 +2412,8 @@
if (message.isCoinbase === true) {
obj.isCoinbase = message.isCoinbase;
}
- if (message.value !== '0') {
- obj.value = message.value;
+ if (message.sats !== '0') {
+ obj.sats = message.sats;
}
if (message.script.length !== 0) {
obj.script = base64FromBytes(message.script);
@@ -2447,7 +2447,7 @@
: undefined;
message.blockHeight = object.blockHeight ?? 0;
message.isCoinbase = object.isCoinbase ?? false;
- message.value = object.value ?? '0';
+ message.sats = object.sats ?? '0';
message.script = object.script ?? new Uint8Array(0);
message.isFinal = object.isFinal ?? false;
message.token =
@@ -2725,7 +2725,7 @@
prevOut: undefined,
inputScript: new Uint8Array(0),
outputScript: new Uint8Array(0),
- value: '0',
+ sats: '0',
sequenceNo: 0,
token: undefined,
plugins: {},
@@ -2746,8 +2746,8 @@
if (message.outputScript.length !== 0) {
writer.uint32(26).bytes(message.outputScript);
}
- if (message.value !== '0') {
- writer.uint32(32).int64(message.value);
+ if (message.sats !== '0') {
+ writer.uint32(32).int64(message.sats);
}
if (message.sequenceNo !== 0) {
writer.uint32(40).uint32(message.sequenceNo);
@@ -2798,7 +2798,7 @@
break;
}
- message.value = longToString(reader.int64() as Long);
+ message.sats = longToString(reader.int64() as Long);
continue;
case 5:
if (tag !== 40) {
@@ -2847,7 +2847,7 @@
outputScript: isSet(object.outputScript)
? bytesFromBase64(object.outputScript)
: new Uint8Array(0),
- value: isSet(object.value) ? globalThis.String(object.value) : '0',
+ sats: isSet(object.sats) ? globalThis.String(object.sats) : '0',
sequenceNo: isSet(object.sequenceNo)
? globalThis.Number(object.sequenceNo)
: 0,
@@ -2876,8 +2876,8 @@
if (message.outputScript.length !== 0) {
obj.outputScript = base64FromBytes(message.outputScript);
}
- if (message.value !== '0') {
- obj.value = message.value;
+ if (message.sats !== '0') {
+ obj.sats = message.sats;
}
if (message.sequenceNo !== 0) {
obj.sequenceNo = Math.round(message.sequenceNo);
@@ -2908,7 +2908,7 @@
: undefined;
message.inputScript = object.inputScript ?? new Uint8Array(0);
message.outputScript = object.outputScript ?? new Uint8Array(0);
- message.value = object.value ?? '0';
+ message.sats = object.sats ?? '0';
message.sequenceNo = object.sequenceNo ?? 0;
message.token =
object.token !== undefined && object.token !== null
@@ -3021,7 +3021,7 @@
function createBaseTxOutput(): TxOutput {
return {
- value: '0',
+ sats: '0',
outputScript: new Uint8Array(0),
spentBy: undefined,
token: undefined,
@@ -3034,8 +3034,8 @@
message: TxOutput,
writer: _m0.Writer = _m0.Writer.create(),
): _m0.Writer {
- if (message.value !== '0') {
- writer.uint32(8).int64(message.value);
+ if (message.sats !== '0') {
+ writer.uint32(8).int64(message.sats);
}
if (message.outputScript.length !== 0) {
writer.uint32(18).bytes(message.outputScript);
@@ -3068,7 +3068,7 @@
break;
}
- message.value = longToString(reader.int64() as Long);
+ message.sats = longToString(reader.int64() as Long);
continue;
case 2:
if (tag !== 18) {
@@ -3115,7 +3115,7 @@
fromJSON(object: any): TxOutput {
return {
- value: isSet(object.value) ? globalThis.String(object.value) : '0',
+ sats: isSet(object.sats) ? globalThis.String(object.sats) : '0',
outputScript: isSet(object.outputScript)
? bytesFromBase64(object.outputScript)
: new Uint8Array(0),
@@ -3138,8 +3138,8 @@
toJSON(message: TxOutput): unknown {
const obj: any = {};
- if (message.value !== '0') {
- obj.value = message.value;
+ if (message.sats !== '0') {
+ obj.sats = message.sats;
}
if (message.outputScript.length !== 0) {
obj.outputScript = base64FromBytes(message.outputScript);
@@ -3169,7 +3169,7 @@
object: I,
): TxOutput {
const message = createBaseTxOutput();
- message.value = object.value ?? '0';
+ message.sats = object.sats ?? '0';
message.outputScript = object.outputScript ?? new Uint8Array(0);
message.spentBy =
object.spentBy !== undefined && object.spentBy !== null
@@ -3670,8 +3670,8 @@
isInvalid: false,
burnSummary: '',
failedColorings: [],
- actualBurnAmount: '',
- intentionalBurn: '0',
+ actualBurnAtoms: '',
+ intentionalBurnAtoms: '0',
burnsMintBatons: false,
};
}
@@ -3705,11 +3705,11 @@
for (const v of message.failedColorings) {
TokenFailedColoring.encode(v!, writer.uint32(58).fork()).ldelim();
}
- if (message.actualBurnAmount !== '') {
- writer.uint32(66).string(message.actualBurnAmount);
+ if (message.actualBurnAtoms !== '') {
+ writer.uint32(66).string(message.actualBurnAtoms);
}
- if (message.intentionalBurn !== '0') {
- writer.uint32(72).uint64(message.intentionalBurn);
+ if (message.intentionalBurnAtoms !== '0') {
+ writer.uint32(72).uint64(message.intentionalBurnAtoms);
}
if (message.burnsMintBatons === true) {
writer.uint32(80).bool(message.burnsMintBatons);
@@ -3784,14 +3784,14 @@
break;
}
- message.actualBurnAmount = reader.string();
+ message.actualBurnAtoms = reader.string();
continue;
case 9:
if (tag !== 72) {
break;
}
- message.intentionalBurn = longToString(
+ message.intentionalBurnAtoms = longToString(
reader.uint64() as Long,
);
continue;
@@ -3836,11 +3836,11 @@
TokenFailedColoring.fromJSON(e),
)
: [],
- actualBurnAmount: isSet(object.actualBurnAmount)
- ? globalThis.String(object.actualBurnAmount)
+ actualBurnAtoms: isSet(object.actualBurnAtoms)
+ ? globalThis.String(object.actualBurnAtoms)
: '',
- intentionalBurn: isSet(object.intentionalBurn)
- ? globalThis.String(object.intentionalBurn)
+ intentionalBurnAtoms: isSet(object.intentionalBurnAtoms)
+ ? globalThis.String(object.intentionalBurnAtoms)
: '0',
burnsMintBatons: isSet(object.burnsMintBatons)
? globalThis.Boolean(object.burnsMintBatons)
@@ -3873,11 +3873,11 @@
TokenFailedColoring.toJSON(e),
);
}
- if (message.actualBurnAmount !== '') {
- obj.actualBurnAmount = message.actualBurnAmount;
+ if (message.actualBurnAtoms !== '') {
+ obj.actualBurnAtoms = message.actualBurnAtoms;
}
- if (message.intentionalBurn !== '0') {
- obj.intentionalBurn = message.intentionalBurn;
+ if (message.intentionalBurnAtoms !== '0') {
+ obj.intentionalBurnAtoms = message.intentionalBurnAtoms;
}
if (message.burnsMintBatons === true) {
obj.burnsMintBatons = message.burnsMintBatons;
@@ -3905,8 +3905,8 @@
object.failedColorings?.map(e =>
TokenFailedColoring.fromPartial(e),
) || [];
- message.actualBurnAmount = object.actualBurnAmount ?? '';
- message.intentionalBurn = object.intentionalBurn ?? '0';
+ message.actualBurnAtoms = object.actualBurnAtoms ?? '';
+ message.intentionalBurnAtoms = object.intentionalBurnAtoms ?? '0';
message.burnsMintBatons = object.burnsMintBatons ?? false;
return message;
},
@@ -4117,7 +4117,7 @@
tokenId: '',
tokenType: undefined,
entryIdx: 0,
- amount: '0',
+ atoms: '0',
isMintBaton: false,
};
}
@@ -4139,8 +4139,8 @@
if (message.entryIdx !== 0) {
writer.uint32(24).int32(message.entryIdx);
}
- if (message.amount !== '0') {
- writer.uint32(32).uint64(message.amount);
+ if (message.atoms !== '0') {
+ writer.uint32(32).uint64(message.atoms);
}
if (message.isMintBaton === true) {
writer.uint32(40).bool(message.isMintBaton);
@@ -4185,7 +4185,7 @@
break;
}
- message.amount = longToString(reader.uint64() as Long);
+ message.atoms = longToString(reader.uint64() as Long);
continue;
case 5:
if (tag !== 40) {
@@ -4214,9 +4214,7 @@
entryIdx: isSet(object.entryIdx)
? globalThis.Number(object.entryIdx)
: 0,
- amount: isSet(object.amount)
- ? globalThis.String(object.amount)
- : '0',
+ atoms: isSet(object.atoms) ? globalThis.String(object.atoms) : '0',
isMintBaton: isSet(object.isMintBaton)
? globalThis.Boolean(object.isMintBaton)
: false,
@@ -4234,8 +4232,8 @@
if (message.entryIdx !== 0) {
obj.entryIdx = Math.round(message.entryIdx);
}
- if (message.amount !== '0') {
- obj.amount = message.amount;
+ if (message.atoms !== '0') {
+ obj.atoms = message.atoms;
}
if (message.isMintBaton === true) {
obj.isMintBaton = message.isMintBaton;
@@ -4254,7 +4252,7 @@
? TokenType.fromPartial(object.tokenType)
: undefined;
message.entryIdx = object.entryIdx ?? 0;
- message.amount = object.amount ?? '0';
+ message.atoms = object.atoms ?? '0';
message.isMintBaton = object.isMintBaton ?? false;
return message;
},
diff --git a/modules/chronik-client/src/ChronikClient.ts b/modules/chronik-client/src/ChronikClient.ts
--- a/modules/chronik-client/src/ChronikClient.ts
+++ b/modules/chronik-client/src/ChronikClient.ts
@@ -1090,7 +1090,7 @@
outIdx: input.prevOut.outIdx,
},
inputScript: toHex(input.inputScript),
- value: parseInt(input.value),
+ sats: BigInt(input.sats),
sequenceNo: input.sequenceNo,
};
if (typeof input.token !== 'undefined') {
@@ -1113,7 +1113,7 @@
function convertToTxOutput(output: proto.TxOutput): TxOutput {
const txOutput: TxOutput = {
- value: parseInt(output.value),
+ sats: BigInt(output.sats),
outputScript: toHex(output.outputScript),
};
if (Object.keys(output.plugins).length > 0) {
@@ -1188,7 +1188,7 @@
},
blockHeight: utxo.blockHeight,
isCoinbase: utxo.isCoinbase,
- value: parseInt(utxo.value),
+ sats: BigInt(utxo.sats),
isFinal: utxo.isFinal,
};
if (typeof utxo.token !== 'undefined') {
@@ -1214,7 +1214,7 @@
blockHeight: utxo.blockHeight,
isCoinbase: utxo.isCoinbase,
script: toHex(utxo.script),
- value: parseInt(utxo.value),
+ sats: BigInt(utxo.sats),
isFinal: utxo.isFinal,
};
if (typeof utxo.token !== 'undefined') {
@@ -1242,8 +1242,8 @@
isInvalid: tokenEntry.isInvalid,
burnSummary: tokenEntry.burnSummary,
failedColorings: tokenEntry.failedColorings,
- actualBurnAmount: tokenEntry.actualBurnAmount,
- intentionalBurn: tokenEntry.intentionalBurn,
+ actualBurnAtoms: BigInt(tokenEntry.actualBurnAtoms),
+ intentionalBurnAtoms: BigInt(tokenEntry.intentionalBurnAtoms),
burnsMintBatons: tokenEntry.burnsMintBatons,
};
if (tokenEntry.groupTokenId !== '') {
@@ -1341,7 +1341,7 @@
const convertedToken: Token = {
tokenId: token.tokenId,
tokenType: convertToTokenType(token.tokenType),
- amount: token.amount,
+ atoms: BigInt(token.atoms),
isMintBaton: token.isMintBaton,
};
@@ -1586,7 +1586,7 @@
*/
outputScript?: string;
/** Value of the output spent by this input, in satoshis. */
- value: number;
+ sats: bigint;
/** `sequence` field of the input; can be used for relative time locking. */
sequenceNo: number;
/** Token value attached to this input */
@@ -1598,7 +1598,7 @@
/** Output of a tx, creates new UTXOs. */
export interface TxOutput {
/** Value of the output, in satoshis. */
- value: number;
+ sats: bigint;
/**
* Script of this output, locking the coins.
* Aka. `scriptPubKey` in bitcoind parlance.
@@ -1651,13 +1651,10 @@
burnSummary: string;
/** Human-readable error messages of why colorings failed */
failedColorings: TokenFailedColoring[];
- /**
- * Number of actually burned tokens (as decimal integer string, e.g. "2000").
- * This is because burns can exceed the 64-bit range of values and protobuf doesn't have a nice type to encode this.
- */
- actualBurnAmount: string;
- /** Burn amount the user explicitly opted into (as decimal integer string) */
- intentionalBurn: string;
+ /** Number of actually burned tokens (in atoms, aka base tokens). */
+ actualBurnAtoms: bigint;
+ /** Burn amount the user explicitly opted into (in atoms, aka base tokens) */
+ intentionalBurnAtoms: bigint;
/** Whether any mint batons have been burned of this token */
burnsMintBatons: boolean;
}
@@ -1806,7 +1803,7 @@
* (make sure it's buried 100 blocks before spending!) */
isCoinbase: boolean;
/** Value of the UTXO in satoshis. */
- value: number;
+ sats: bigint;
/** Is this utxo avalanche finalized */
isFinal: boolean;
/** Token value attached to this utxo */
@@ -1828,7 +1825,7 @@
* (make sure it's buried 100 blocks before spending!) */
isCoinbase: boolean;
/** Value of the UTXO in satoshis. */
- value: number;
+ sats: bigint;
/** Bytecode of the script of the output */
script: string;
/** Is this utxo avalanche finalized */
@@ -1851,8 +1848,8 @@
* passes no entryIdx key for UTXOS
*/
entryIdx?: number;
- /** Base token amount of the input/output */
- amount: string;
+ /** Amount in atoms (aka base tokens) of the input/output */
+ atoms: bigint;
/** Whether the token is a mint baton */
isMintBaton: boolean;
}
diff --git a/modules/chronik-client/test/integration/broadcast_txs_and_validate_rawtx.ts b/modules/chronik-client/test/integration/broadcast_txs_and_validate_rawtx.ts
--- a/modules/chronik-client/test/integration/broadcast_txs_and_validate_rawtx.ts
+++ b/modules/chronik-client/test/integration/broadcast_txs_and_validate_rawtx.ts
@@ -171,15 +171,15 @@
const sameInvalidTx = await chronik.validateRawTx(fromHex(BAD_RAW_TX));
expect(sameInvalidTx.txid).to.eql(invalidTx.txid);
const invalidTxSumInputs = invalidTx.inputs
- .map(input => input.value)
- .reduce((prev, curr) => prev + curr, 0);
+ .map(input => input.sats)
+ .reduce((prev, curr) => prev + curr, 0n);
const invalidTxSumOutputs = invalidTx.outputs
- .map(output => output.value)
- .reduce((prev, curr) => prev + curr, 0);
+ .map(output => output.sats)
+ .reduce((prev, curr) => prev + curr, 0n);
// Indeed, the outputs are greater than the inputs, and such that the tx is invalid
- expect(invalidTxSumInputs).to.eql(BAD_VALUE_IN_SATS);
- expect(invalidTxSumOutputs).to.eql(BAD_VALUE_OUT_SATS);
+ expect(invalidTxSumInputs).to.eql(BigInt(BAD_VALUE_IN_SATS));
+ expect(invalidTxSumOutputs).to.eql(BigInt(BAD_VALUE_OUT_SATS));
// We cannot call validateRawTx to get a tx from a rawtx of a normal token send tx if its inputs are not in the mempool or db
// txid in blockchain but not regtest, 423e24bf0715cfb80727e5e7a6ff7b9e37cb2f555c537ab06fdc7fd9b3a0ba3a
@@ -213,7 +213,7 @@
// We can't broadcast an ALP burn tx without setting skipTokenChecks
await expect(chronik.broadcastTx(alpBurnRawTx)).to.be.rejectedWith(
Error,
- `Failed getting /broadcast-tx: 400: Tx ${alpBurnTxid} failed token checks: Unexpected burn: Burns 1 base tokens.`,
+ `Failed getting /broadcast-tx: 400: Tx ${alpBurnTxid} failed token checks: Unexpected burn: Burns 1 atoms.`,
);
// We also can't broadcast an array of txs if one tx is a burn
@@ -224,7 +224,7 @@
chronik.broadcastTxs([okRawTx, alpBurnRawTx]),
).to.be.rejectedWith(
Error,
- `Failed getting /broadcast-txs: 400: Tx ${alpBurnTxid} failed token checks: Unexpected burn: Burns 1 base tokens.`,
+ `Failed getting /broadcast-txs: 400: Tx ${alpBurnTxid} failed token checks: Unexpected burn: Burns 1 atoms.`,
);
// We can't broadcast an array of txs if one tx is invalid
@@ -346,7 +346,7 @@
{
...alpGenesisAfterMined.inputs[0],
outputScript: alpGenesisPreview.inputs[0].outputScript,
- value: alpGenesisPreview.inputs[0].value,
+ sats: alpGenesisPreview.inputs[0].sats,
},
],
});
diff --git a/modules/chronik-client/test/integration/plugins.ts b/modules/chronik-client/test/integration/plugins.ts
--- a/modules/chronik-client/test/integration/plugins.ts
+++ b/modules/chronik-client/test/integration/plugins.ts
@@ -7,12 +7,12 @@
import { ChildProcess } from 'node:child_process';
import { EventEmitter, once } from 'node:events';
import path from 'path';
-import { ChronikClient, WsMsgClient, WsEndpoint, Tx } from '../../index';
+import { ChronikClient, WsEndpoint, WsMsgClient } from '../../index';
import initializeTestRunner, {
cleanupMochaRegtest,
+ expectWsMsgs,
setMochaTimeout,
TestInfo,
- expectWsMsgs,
} from '../setup/testRunner';
const expect = chai.expect;
@@ -132,7 +132,7 @@
isFinal: false,
outpoint: BASE_OUTPOINT,
script: TEST_UTXO_OUTPUTSCRIPT,
- value: 1000,
+ sats: 1000n,
};
const FIRST_PLUGIN_OPRETURN = '6a0454455354046172676f04616c656603616263';
@@ -309,7 +309,7 @@
// We get plugin info in expected shape for outputs
expect(outputs[0]).to.deep.equal({
- value: 0,
+ sats: 0n,
outputScript: FIRST_PLUGIN_OPRETURN,
// No plugins key here as no associated plugin data for this output
});
@@ -368,7 +368,7 @@
groups: [BYTES_a],
},
},
- value: 4999990000,
+ sats: 4999990000n,
},
],
});
@@ -449,7 +449,7 @@
// We get plugin info in expected shape for outputs
expect(outputs[0]).to.deep.equal({
- value: 0,
+ sats: 0n,
outputScript: SECOND_PLUGIN_OPRETURN,
// No plugins key here as no associated plugin data for this output
});
@@ -509,7 +509,7 @@
groups: [BYTES_b],
},
},
- value: 4999980000,
+ sats: 4999980000n,
},
],
});
@@ -589,7 +589,7 @@
// We get plugin info in expected shape for outputs
expect(outputs[0]).to.deep.equal({
- value: 0,
+ sats: 0n,
outputScript: FIRST_PLUGIN_OPRETURN,
// No plugins key here as no associated plugin data for this output
});
@@ -705,7 +705,7 @@
// We get plugin info in expected shape for outputs
expect(outputs[0]).to.deep.equal({
- value: 0,
+ sats: 0n,
outputScript: THIRD_PLUGIN_OPRETURN,
// No plugins key here as no associated plugin data for this output
});
@@ -730,7 +730,7 @@
data: [BYTES_carp, BYTES_blub, BYTES_abc],
},
},
- value: 4999970000,
+ sats: 4999970000n,
};
expect(thesePluginUtxos).to.deep.equal({
groupHex: BYTES_c,
@@ -891,7 +891,7 @@
// We get plugin info in expected shape for outputs
expect(outputs[0]).to.deep.equal({
- value: 0,
+ sats: 0n,
outputScript: SECOND_PLUGIN_OPRETURN,
// No plugins key here as no associated plugin data for this output
});
diff --git a/modules/chronik-client/test/integration/script_endpoints.ts b/modules/chronik-client/test/integration/script_endpoints.ts
--- a/modules/chronik-client/test/integration/script_endpoints.ts
+++ b/modules/chronik-client/test/integration/script_endpoints.ts
@@ -404,7 +404,7 @@
// within history txs, confirmed txs are sorted in block order, unconfirmed txs are sorted by timeFirstSeen
// i.e., history.txs[0] will have the highest timeFirstSeen
// For txs with the same timeFirstSeen, the alphabetically-last txs appears first
- const historyClone: Tx[] = JSON.parse(JSON.stringify(history.txs));
+ const historyClone: Tx[] = [...history.txs];
// Sort historyClone by timeFirstSeen and then by txid
historyClone.sort(
@@ -618,7 +618,7 @@
broadcastTxids.length,
);
// Clone history.txs to test sorting
- const historyClone: Tx[] = JSON.parse(JSON.stringify(history.txs));
+ const historyClone: Tx[] = [...history.txs];
// history txs within blocks sorting
// The history endpoint returns confirmed txs sorted by timeFirstSeen (high to low) and then by txid (alphabetical last to first)
diff --git a/modules/chronik-client/test/integration/token_alp.ts b/modules/chronik-client/test/integration/token_alp.ts
--- a/modules/chronik-client/test/integration/token_alp.ts
+++ b/modules/chronik-client/test/integration/token_alp.ts
@@ -148,11 +148,11 @@
const BASE_TX_INPUT = {
inputScript: '0151',
outputScript: 'a914da1745e9b549bd0bfa1a569971c77eba30cd5a4b87',
- value: 5000,
+ sats: 5000n,
sequenceNo: 0,
};
const BASE_TX_OUTPUT = {
- value: 546,
+ sats: 546n,
outputScript: 'a914da1745e9b549bd0bfa1a569971c77eba30cd5a4b87',
};
const BASE_TX_TOKEN_INFO_ALP = {
@@ -162,7 +162,7 @@
number: 0,
},
entryIdx: 0,
- amount: '0',
+ atoms: 0n,
isMintBaton: false,
};
const BASE_TOKEN_ENTRY = {
@@ -170,8 +170,8 @@
isInvalid: false,
burnSummary: '',
failedColorings: [],
- actualBurnAmount: '0',
- intentionalBurn: '0',
+ actualBurnAtoms: 0n,
+ intentionalBurnAtoms: 0n,
burnsMintBatons: false,
};
let alpGenesisTxid = '';
@@ -283,7 +283,7 @@
txid: '3fa435fca55edf447ef7539ecba141a6585fa71ac4062cdcc61f1235c40f4613',
outIdx: 0,
},
- value: 5000000000,
+ sats: 5000000000n,
},
]);
@@ -291,17 +291,17 @@
const expectedOutputs = [
{
...BASE_TX_OUTPUT,
- value: 0,
+ sats: 0n,
outputScript:
'6a504c63534c5032000747454e4553495304544553540a5465737420546f6b656e12687474703a2f2f6578616d706c652e636f6d0a546f6b656e20446174610c546f6b656e205075626b657904040a00000000001400000000001e000000000000000000000002',
},
{
...BASE_TX_OUTPUT,
- value: 10000,
+ sats: 10000n,
token: {
...BASE_TX_TOKEN_INFO_ALP,
tokenId: alpGenesisTxid,
- amount: '10',
+ atoms: 10n,
},
},
{
@@ -309,7 +309,7 @@
token: {
...BASE_TX_TOKEN_INFO_ALP,
tokenId: alpGenesisTxid,
- amount: '20',
+ atoms: 20n,
},
},
{
@@ -317,16 +317,16 @@
token: {
...BASE_TX_TOKEN_INFO_ALP,
tokenId: alpGenesisTxid,
- amount: '30',
+ atoms: 30n,
},
},
{
...BASE_TX_OUTPUT,
- value: 4999900000,
+ sats: 4999900000n,
},
{
...BASE_TX_OUTPUT,
- value: 5000,
+ sats: 5000n,
token: {
...BASE_TX_TOKEN_INFO_ALP,
tokenId: alpGenesisTxid,
@@ -382,8 +382,8 @@
if ('token' in output) {
const { token } = output;
// Remove the entryIdx key from these outputs, as we do not expect to see it in tokenId.utxos() output
- delete (token as Token).entryIdx;
- utxoTokenKeysFromOutputs.push(output.token as Token);
+ delete (token as unknown as Token).entryIdx;
+ utxoTokenKeysFromOutputs.push(output.token as unknown as Token);
}
}
@@ -471,7 +471,7 @@
token: {
...BASE_TX_TOKEN_INFO_ALP,
tokenId: alpGenesisTxid,
- amount: '0',
+ atoms: 0n,
isMintBaton: true,
},
},
@@ -481,7 +481,7 @@
expect(alpMint.outputs).to.deep.equal([
{
...BASE_TX_OUTPUT,
- value: 0,
+ sats: 0n,
outputScript:
'6a5038534c503200044d494e54e2c68fa87324d048fbb0b72ca7d386ad757967f20244854f14920a6caa714dbb0205000000000000000000000001',
},
@@ -490,7 +490,7 @@
token: {
...BASE_TX_TOKEN_INFO_ALP,
tokenId: alpGenesisTxid,
- amount: '5',
+ atoms: 5n,
},
},
{
@@ -501,7 +501,7 @@
token: {
...BASE_TX_TOKEN_INFO_ALP,
tokenId: alpGenesisTxid,
- amount: '0',
+ atoms: 0n,
isMintBaton: true,
},
},
@@ -555,11 +555,11 @@
txid: alpGenesisTxid,
outIdx: 1,
},
- value: 10000,
+ sats: 10000n,
token: {
...BASE_TX_TOKEN_INFO_ALP,
tokenId: alpGenesisTxid,
- amount: '10',
+ atoms: 10n,
},
},
{
@@ -568,11 +568,11 @@
txid: '0dab1008db30343a4f771983e9fd96cbc15f0c6efc73f5249c9bae311ef1e92f',
outIdx: 1,
},
- value: 546,
+ sats: 546n,
token: {
...BASE_TX_TOKEN_INFO_ALP,
tokenId: alpGenesisTxid,
- amount: '5',
+ atoms: 5n,
},
},
]);
@@ -581,17 +581,17 @@
expect(alpSend.outputs).to.deep.equal([
{
...BASE_TX_OUTPUT,
- value: 0,
+ sats: 0n,
outputScript:
'6a5037534c5032000453454e44e2c68fa87324d048fbb0b72ca7d386ad757967f20244854f14920a6caa714dbb020300000000000c0000000000',
},
{
...BASE_TX_OUTPUT,
- value: 5000,
+ sats: 5000n,
token: {
...BASE_TX_TOKEN_INFO_ALP,
tokenId: alpGenesisTxid,
- amount: '3',
+ atoms: 3n,
},
},
{
@@ -599,7 +599,7 @@
token: {
...BASE_TX_TOKEN_INFO_ALP,
tokenId: alpGenesisTxid,
- amount: '12',
+ atoms: 12n,
},
},
]);
@@ -643,7 +643,7 @@
txid: alpGenesisTxid,
outIdx: 4,
},
- value: 4999900000,
+ sats: 4999900000n,
},
]);
@@ -651,22 +651,22 @@
expect(alpNextGenesis.outputs).to.deep.equal([
{
...BASE_TX_OUTPUT,
- value: 0,
+ sats: 0n,
outputScript:
'6a501b534c5032000747454e455349530000000000000164000000000002',
},
{
...BASE_TX_OUTPUT,
- value: 5000,
+ sats: 5000n,
token: {
...BASE_TX_TOKEN_INFO_ALP,
tokenId: alpNextGenesisTxid,
- amount: '100',
+ atoms: 100n,
},
},
{
...BASE_TX_OUTPUT,
- value: 5000,
+ sats: 5000n,
token: {
...BASE_TX_TOKEN_INFO_ALP,
tokenId: alpNextGenesisTxid,
@@ -675,7 +675,7 @@
},
{
...BASE_TX_OUTPUT,
- value: 5000,
+ sats: 5000n,
token: {
...BASE_TX_TOKEN_INFO_ALP,
tokenId: alpNextGenesisTxid,
@@ -684,7 +684,7 @@
},
{
...BASE_TX_OUTPUT,
- value: 4999800000,
+ sats: 4999800000n,
},
]);
@@ -734,7 +734,7 @@
...BASE_TX_TOKEN_INFO_ALP,
tokenId: alpGenesisTxid,
entryIdx: 2,
- amount: '3',
+ atoms: 3n,
},
},
{
@@ -756,7 +756,7 @@
expect(alpMulti.outputs).to.deep.equal([
{
...BASE_TX_OUTPUT,
- value: 0,
+ sats: 0n,
outputScript:
'6a5026534c5032000747454e45534953054d554c5449000000000002ffffffffffff0000000000000138534c503200044d494e542c787e508ba86115c7fb13cc582d97a6f3b7d60dad070dcf49e19d0aec12df72020000000000000500000000000030534c503200044255524ee2c68fa87324d048fbb0b72ca7d386ad757967f20244854f14920a6caa714dbb01000000000049534c5032000453454e44e2c68fa87324d048fbb0b72ca7d386ad757967f20244854f14920a6caa714dbb05000000000000000000000000000000000000000000000000020000000000',
},
@@ -765,7 +765,7 @@
token: {
...BASE_TX_TOKEN_INFO_ALP,
tokenId: alpMultiTxid,
- amount: '281474976710655',
+ atoms: 281474976710655n,
},
},
{
@@ -774,7 +774,7 @@
...BASE_TX_TOKEN_INFO_ALP,
tokenId: alpNextGenesisTxid,
entryIdx: 1,
- amount: '5',
+ atoms: 5n,
},
},
{
@@ -794,7 +794,7 @@
...BASE_TX_TOKEN_INFO_ALP,
tokenId: alpGenesisTxid,
entryIdx: 2,
- amount: '2',
+ atoms: 2n,
},
},
{
@@ -829,8 +829,8 @@
...BASE_TOKEN_ENTRY,
tokenId: alpGenesisTxid,
txType: 'SEND',
- actualBurnAmount: '1',
- intentionalBurn: '1',
+ actualBurnAtoms: 1n,
+ intentionalBurnAtoms: 1n,
tokenType: {
number: 0,
protocol: 'ALP',
@@ -925,7 +925,7 @@
txid: alpGenesisTxid,
outIdx: 6,
},
- value: 546,
+ sats: 546n,
token: {
...BASE_TX_TOKEN_INFO_ALP,
tokenId: alpGenesisTxid,
@@ -939,12 +939,12 @@
txid: alpMultiTxid,
outIdx: 1,
},
- value: 546,
+ sats: 546n,
token: {
...BASE_TX_TOKEN_INFO_ALP,
tokenId: alpMultiTxid,
entryIdx: 3,
- amount: '281474976710655',
+ atoms: 281474976710655n,
},
},
]);
@@ -953,7 +953,7 @@
expect(alpMega.outputs).to.deep.equal([
{
...BASE_TX_OUTPUT,
- value: 0,
+ sats: 0n,
outputScript:
'6a5036534c5032000747454e4553495303414c4c0000000000050000000000000700000000000000000000000000000000000100000000000215534c5032000747454e4553495300000000000000004c56534c503200044d494e54e2c68fa87324d048fbb0b72ca7d386ad757967f20244854f14920a6caa714dbb070000000000000000000000000000000000000000000000000000000000000000000000000000000000006338534c503200044d494e54e2c68fa87324d048fbb0b72ca7d386ad757967f20244854f14920a6caa714dbb02000000000000ffffffffffff0032534c503200044d494e54e2c68fa87324d048fbb0b72ca7d386ad757967f20244854f14920a6caa714dbb010000000000000130534c503200044255524ee2c68fa87324d048fbb0b72ca7d386ad757967f20244854f14920a6caa714dbb02000000000038534c503200044d494e542c787e508ba86115c7fb13cc582d97a6f3b7d60dad070dcf49e19d0aec12df7202030000000000000000000000014c56534c503200044d494e54e2c68fa87324d048fbb0b72ca7d386ad757967f20244854f14920a6caa714dbb07000000000000000000000000000000000000020000000000000000000000000000000000000000000000012c534c503200044d494e54e2c68fa87324d048fbb0b72ca7d386ad757967f20244854f14920a6caa714dbb000030534c503200044255524ee2c68fa87324d048fbb0b72ca7d386ad757967f20244854f14920a6caa714dbb0000000000004c73534c5032000453454e44ba2ea53336d07ab7bab5eb95f53a6dd041acfca80f2af3a6b93abad7147fc4e30c0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007b00000000004c67534c5032000453454e44ba2ea53336d07ab7bab5eb95f53a6dd041acfca80f2af3a6b93abad7147fc4e30a000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000ffffffffffff2c534c503200044d494e54ba2ea53336d07ab7bab5eb95f53a6dd041acfca80f2af3a6b93abad7147fc4e3000005534c50328930534c503200044255524eba2ea53336d07ab7bab5eb95f53a6dd041acfca80f2af3a6b93abad7147fc4e300000000000005534c50329a',
},
@@ -963,7 +963,7 @@
...BASE_TX_TOKEN_INFO_ALP,
tokenId: alpNextGenesisTxid,
entryIdx: 1,
- amount: '3',
+ atoms: 3n,
},
},
{
@@ -971,7 +971,7 @@
token: {
...BASE_TX_TOKEN_INFO_ALP,
tokenId: alpMegaTxid,
- amount: '7',
+ atoms: 7n,
},
},
{
@@ -985,12 +985,12 @@
},
{
...BASE_TX_OUTPUT,
- value: 1000,
+ sats: 1000n,
token: {
...BASE_TX_TOKEN_INFO_ALP,
tokenId: alpGenesisTxid,
entryIdx: 2,
- amount: '2',
+ atoms: 2n,
},
},
{
@@ -998,7 +998,7 @@
token: {
...BASE_TX_TOKEN_INFO_ALP,
tokenId: alpMegaTxid,
- amount: '1',
+ atoms: 1n,
},
},
{
@@ -1046,7 +1046,7 @@
...BASE_TX_TOKEN_INFO_ALP,
tokenId: alpMultiTxid,
entryIdx: 3,
- amount: '281474976710655',
+ atoms: 281474976710655n,
},
},
]);
@@ -1089,7 +1089,7 @@
'bb4d71aa6c0a92144f854402f2677975ad86d3a72cb7b0fb48d02473a88fc6e2',
txType: 'MINT',
burnSummary:
- 'Invalid coloring at pushdata idx 2: Too few outputs, expected 107 but got 11. Invalid coloring at pushdata idx 3: Overlapping amount when trying to color 281474976710655 at index 2, output is already colored with 7 of 72101f535470e0a6de7db9ba0ba115845566f738cc5124255b472347b5927565 (ALP STANDARD (V0)). Invalid coloring at pushdata idx 4: Overlapping mint baton when trying to color mint baton at index 2, output is already colored with 7 of 72101f535470e0a6de7db9ba0ba115845566f738cc5124255b472347b5927565 (ALP STANDARD (V0)). Invalid coloring at pushdata idx 8: Duplicate token_id bb4d71aa6c0a92144f854402f2677975ad86d3a72cb7b0fb48d02473a88fc6e2, found in section 2. Invalid coloring at pushdata idx 9: Duplicate intentional burn token_id bb4d71aa6c0a92144f854402f2677975ad86d3a72cb7b0fb48d02473a88fc6e2, found in burn #0 and #1',
+ 'Invalid coloring at pushdata idx 2: Too few outputs, expected 107 but got 11. Invalid coloring at pushdata idx 3: Overlapping atoms when trying to color 281474976710655 at index 2, output is already colored with 7 of 72101f535470e0a6de7db9ba0ba115845566f738cc5124255b472347b5927565 (ALP STANDARD (V0)). Invalid coloring at pushdata idx 4: Overlapping mint baton when trying to color mint baton at index 2, output is already colored with 7 of 72101f535470e0a6de7db9ba0ba115845566f738cc5124255b472347b5927565 (ALP STANDARD (V0)). Invalid coloring at pushdata idx 8: Duplicate token_id bb4d71aa6c0a92144f854402f2677975ad86d3a72cb7b0fb48d02473a88fc6e2, found in section 2. Invalid coloring at pushdata idx 9: Duplicate intentional burn token_id bb4d71aa6c0a92144f854402f2677975ad86d3a72cb7b0fb48d02473a88fc6e2, found in burn #0 and #1',
failedColorings: [
{
pushdataIdx: 2,
@@ -1097,7 +1097,7 @@
},
{
pushdataIdx: 3,
- error: 'Overlapping amount when trying to color 281474976710655 at index 2, output is already colored with 7 of 72101f535470e0a6de7db9ba0ba115845566f738cc5124255b472347b5927565 (ALP STANDARD (V0))',
+ error: 'Overlapping atoms when trying to color 281474976710655 at index 2, output is already colored with 7 of 72101f535470e0a6de7db9ba0ba115845566f738cc5124255b472347b5927565 (ALP STANDARD (V0))',
},
{
pushdataIdx: 4,
@@ -1112,7 +1112,7 @@
error: 'Duplicate intentional burn token_id bb4d71aa6c0a92144f854402f2677975ad86d3a72cb7b0fb48d02473a88fc6e2, found in burn #0 and #1',
},
],
- intentionalBurn: '2',
+ intentionalBurnAtoms: 2n,
tokenType: {
number: 0,
protocol: 'ALP',
@@ -1172,7 +1172,7 @@
const blockTxs = await chronik.blockTxs(CHAIN_INIT_HEIGHT + 2);
// Clone as we will use blockTxs.txs later
- const txsFromBlock = JSON.parse(JSON.stringify(blockTxs.txs));
+ const txsFromBlock = [...blockTxs.txs];
// The first tx is the coinbase tx, which is not a token
const coinbaseTx = txsFromBlock.shift()!;
diff --git a/modules/chronik-client/test/integration/token_slp_fungible.ts b/modules/chronik-client/test/integration/token_slp_fungible.ts
--- a/modules/chronik-client/test/integration/token_slp_fungible.ts
+++ b/modules/chronik-client/test/integration/token_slp_fungible.ts
@@ -112,11 +112,11 @@
const BASE_TX_INPUT = {
inputScript: '0151',
outputScript: 'a914da1745e9b549bd0bfa1a569971c77eba30cd5a4b87',
- value: 5000000000,
+ sats: 5000000000n,
sequenceNo: 0,
};
const BASE_TX_OUTPUT = {
- value: 2000,
+ sats: 2000n,
outputScript: 'a914da1745e9b549bd0bfa1a569971c77eba30cd5a4b87',
};
const BASE_TX_TOKEN_INFO_SLP_FUNGIBLE = {
@@ -126,7 +126,7 @@
number: 1,
},
entryIdx: 0,
- amount: '0',
+ atoms: 0n,
isMintBaton: false,
};
const BASE_TOKEN_ENTRY = {
@@ -134,8 +134,8 @@
isInvalid: false,
burnSummary: '',
failedColorings: [],
- actualBurnAmount: '0',
- intentionalBurn: '0',
+ actualBurnAtoms: 0n,
+ intentionalBurnAtoms: 0n,
burnsMintBatons: false,
};
let slpGenesisTxid = '';
@@ -184,22 +184,22 @@
expect(slpGenesis.outputs).to.deep.equal([
{
...BASE_TX_OUTPUT,
- value: 0,
+ sats: 0n,
outputScript:
'6a04534c500001010747454e4553495307534c5054455354105465737420534c5020546f6b656e203312687474703a2f2f6578616d706c652f736c7020787878787878787878787878787878787878787878787878787878787878787801040102080000000000001388',
},
{
...BASE_TX_OUTPUT,
- value: 10000,
+ sats: 10000n,
token: {
...BASE_TX_TOKEN_INFO_SLP_FUNGIBLE,
tokenId: slpGenesisTxid,
- amount: '5000',
+ atoms: 5000n,
},
},
{
...BASE_TX_OUTPUT,
- value: 10000,
+ sats: 10000n,
token: {
...BASE_TX_TOKEN_INFO_SLP_FUNGIBLE,
tokenId: slpGenesisTxid,
@@ -208,7 +208,7 @@
},
{
...BASE_TX_OUTPUT,
- value: 4999600000,
+ sats: 4999600000n,
},
]);
@@ -289,7 +289,7 @@
txid: slpGenesisTxid,
outIdx: 2,
},
- value: 10000,
+ sats: 10000n,
token: {
...BASE_TX_TOKEN_INFO_SLP_FUNGIBLE,
tokenId: slpGenesisTxid,
@@ -302,7 +302,7 @@
expect(slpMint.outputs).to.deep.equal([
{
...BASE_TX_OUTPUT,
- value: 0,
+ sats: 0n,
outputScript:
'6a04534c50000101044d494e5420cd295e7eb883b5826e2d8872b1626a4af4ce7ec81c468f1bfdad14632036d20a0103080000000000000014',
},
@@ -311,7 +311,7 @@
token: {
...BASE_TX_TOKEN_INFO_SLP_FUNGIBLE,
tokenId: slpGenesisTxid,
- amount: '20',
+ atoms: 20n,
},
},
{
@@ -369,11 +369,11 @@
txid: slpGenesisTxid,
outIdx: 1,
},
- value: 10000,
+ sats: 10000n,
token: {
...BASE_TX_TOKEN_INFO_SLP_FUNGIBLE,
tokenId: slpGenesisTxid,
- amount: '5000',
+ atoms: 5000n,
},
},
]);
@@ -382,26 +382,26 @@
expect(slpSend.outputs).to.deep.equal([
{
...BASE_TX_OUTPUT,
- value: 0,
+ sats: 0n,
outputScript:
'6a04534c500001010453454e4420cd295e7eb883b5826e2d8872b1626a4af4ce7ec81c468f1bfdad14632036d20a0800000000000003e8080000000000000fa0',
},
{
...BASE_TX_OUTPUT,
- value: 4000,
+ sats: 4000n,
token: {
...BASE_TX_TOKEN_INFO_SLP_FUNGIBLE,
tokenId: slpGenesisTxid,
- amount: '1000',
+ atoms: 1000n,
},
},
{
...BASE_TX_OUTPUT,
- value: 4000,
+ sats: 4000n,
token: {
...BASE_TX_TOKEN_INFO_SLP_FUNGIBLE,
tokenId: slpGenesisTxid,
- amount: '4000',
+ atoms: 4000n,
},
},
]);
@@ -445,7 +445,7 @@
txid: slpGenesisTxid,
outIdx: 3,
},
- value: 4999600000,
+ sats: 4999600000n,
},
]);
@@ -454,13 +454,13 @@
expect(slpEmptyGenesis.outputs).to.deep.equal([
{
...BASE_TX_OUTPUT,
- value: 0,
+ sats: 0n,
outputScript:
'6a04534c500001010747454e455349534c004c004c004c0001004c00080000000000000000',
},
{
...BASE_TX_OUTPUT,
- value: 4999500000,
+ sats: 4999500000n,
},
]);
@@ -496,7 +496,7 @@
const blockTxs = await chronik.blockTxs(CHAIN_INIT_HEIGHT + 2);
// Clone as we will use blockTxs.txs later
- const txsFromBlock = JSON.parse(JSON.stringify(blockTxs.txs));
+ const txsFromBlock = [...blockTxs.txs];
// The first tx is the coinbase tx, which is not a token
const coinbaseTx = txsFromBlock.shift()!;
diff --git a/modules/chronik-client/test/integration/token_slp_mint_vault.ts b/modules/chronik-client/test/integration/token_slp_mint_vault.ts
--- a/modules/chronik-client/test/integration/token_slp_mint_vault.ts
+++ b/modules/chronik-client/test/integration/token_slp_mint_vault.ts
@@ -115,11 +115,11 @@
},
inputScript: '0151',
outputScript: 'a914da1745e9b549bd0bfa1a569971c77eba30cd5a4b87',
- value: 100000,
+ sats: 100000n,
sequenceNo: 0,
};
const BASE_TX_OUTPUT = {
- value: 10000,
+ sats: 10000n,
outputScript: 'a914da1745e9b549bd0bfa1a569971c77eba30cd5a4b87',
};
const BASE_TX_TOKEN_INFO_SLP_V2 = {
@@ -129,7 +129,7 @@
number: 2,
},
entryIdx: 0,
- amount: '1000',
+ atoms: 1000n,
isMintBaton: false,
};
const BASE_TOKEN_ENTRY = {
@@ -137,8 +137,8 @@
isInvalid: false,
burnSummary: '',
failedColorings: [],
- actualBurnAmount: '0',
- intentionalBurn: '0',
+ actualBurnAtoms: 0n,
+ intentionalBurnAtoms: 0n,
burnsMintBatons: false,
};
@@ -192,7 +192,7 @@
},
{
...BASE_TX_OUTPUT,
- value: 79000,
+ sats: 79000n,
outputScript: 'a91428e2146de5a061bf57845a04968d89cbdab733e387',
},
]);
@@ -261,13 +261,13 @@
expect(slpVaultGenesis.outputs).to.deep.equal([
{
...BASE_TX_OUTPUT,
- value: 0,
+ sats: 0n,
outputScript:
'6a04534c500001020747454e4553495308534c505641554c540130013020787878787878787878787878787878787878787878787878787878787878787801001428e2146de5a061bf57845a04968d89cbdab733e30800000000000003e8',
},
{
...BASE_TX_OUTPUT,
- value: 546,
+ sats: 546n,
token: {
...BASE_TX_TOKEN_INFO_SLP_V2,
tokenId: slpVaultGenesisTxid,
@@ -275,7 +275,7 @@
},
{
...BASE_TX_OUTPUT,
- value: 99000,
+ sats: 99000n,
},
]);
@@ -342,7 +342,7 @@
},
inputScript: '015c',
outputScript: 'a91428e2146de5a061bf57845a04968d89cbdab733e387',
- value: 10000,
+ sats: 10000n,
},
]);
@@ -350,13 +350,13 @@
expect(slpVaultMint.outputs).to.deep.equal([
{
...BASE_TX_OUTPUT,
- value: 0,
+ sats: 0n,
outputScript:
'6a04534c50000102044d494e5420768626ba27515513f148d714453bd2964f0de49c6686fa54da56ae4e19387c70080000000000000fa0',
},
{
...BASE_TX_OUTPUT,
- value: 9000,
+ sats: 9000n,
},
]);
diff --git a/modules/chronik-client/test/integration/token_slp_nft1.ts b/modules/chronik-client/test/integration/token_slp_nft1.ts
--- a/modules/chronik-client/test/integration/token_slp_nft1.ts
+++ b/modules/chronik-client/test/integration/token_slp_nft1.ts
@@ -116,11 +116,11 @@
},
inputScript: '0151',
outputScript: 'a914da1745e9b549bd0bfa1a569971c77eba30cd5a4b87',
- value: 5000000000,
+ sats: 5000000000n,
sequenceNo: 0,
};
const BASE_TX_OUTPUT = {
- value: 2000,
+ sats: 2000n,
outputScript: 'a914da1745e9b549bd0bfa1a569971c77eba30cd5a4b87',
};
const BASE_TX_TOKEN_INFO_SLP_NFT = {
@@ -130,7 +130,7 @@
number: 129,
},
entryIdx: 0,
- amount: '0',
+ atoms: 0n,
isMintBaton: true,
};
const BASE_TOKEN_ENTRY = {
@@ -138,8 +138,8 @@
isInvalid: false,
burnSummary: '',
failedColorings: [],
- actualBurnAmount: '0',
- intentionalBurn: '0',
+ actualBurnAtoms: 0n,
+ intentionalBurnAtoms: 0n,
burnsMintBatons: false,
};
let slpGenesisTxid = '';
@@ -184,23 +184,23 @@
expect(slpGenesis.outputs).to.deep.equal([
{
...BASE_TX_OUTPUT,
- value: 0,
+ sats: 0n,
outputScript:
'6a04534c500001810747454e455349530d534c50204e46542047524f555013536c70204e46542047524f555020746f6b656e0e687474703a2f2f736c702e6e667420787878787878787878787878787878787878787878787878787878787878787801040102080000000000001388',
},
{
...BASE_TX_OUTPUT,
- value: 10000,
+ sats: 10000n,
token: {
...BASE_TX_TOKEN_INFO_SLP_NFT,
tokenId: slpGenesisTxid,
- amount: '5000',
+ atoms: 5000n,
isMintBaton: false,
},
},
{
...BASE_TX_OUTPUT,
- value: 10000,
+ sats: 10000n,
token: {
...BASE_TX_TOKEN_INFO_SLP_NFT,
tokenId: slpGenesisTxid,
@@ -208,7 +208,7 @@
},
{
...BASE_TX_OUTPUT,
- value: 4999600000,
+ sats: 4999600000n,
},
]);
@@ -285,7 +285,7 @@
txid: 'b5100125684e0a7ccb8a6a2a0272586e1275f438924464000df5c834ed64bccb',
outIdx: 2,
},
- value: 10000,
+ sats: 10000n,
token: {
...BASE_TX_TOKEN_INFO_SLP_NFT,
tokenId: slpGenesisTxid,
@@ -297,7 +297,7 @@
expect(slpMint.outputs).to.deep.equal([
{
...BASE_TX_OUTPUT,
- value: 0,
+ sats: 0n,
outputScript:
'6a04534c50000181044d494e5420b5100125684e0a7ccb8a6a2a0272586e1275f438924464000df5c834ed64bccb0103080000000000000014',
},
@@ -306,7 +306,7 @@
token: {
...BASE_TX_TOKEN_INFO_SLP_NFT,
tokenId: slpGenesisTxid,
- amount: '20',
+ atoms: 20n,
isMintBaton: false,
},
},
@@ -364,11 +364,11 @@
txid: 'b5100125684e0a7ccb8a6a2a0272586e1275f438924464000df5c834ed64bccb',
outIdx: 1,
},
- value: 10000,
+ sats: 10000n,
token: {
...BASE_TX_TOKEN_INFO_SLP_NFT,
tokenId: slpGenesisTxid,
- amount: '5000',
+ atoms: 5000n,
isMintBaton: false,
},
},
@@ -378,7 +378,7 @@
expect(slpSend.outputs).to.deep.equal([
{
...BASE_TX_OUTPUT,
- value: 0,
+ sats: 0n,
outputScript:
'6a04534c500001810453454e4420b5100125684e0a7ccb8a6a2a0272586e1275f438924464000df5c834ed64bccb080000000000000001080000000000000063080000000000000384080000000000000fa0',
},
@@ -387,7 +387,7 @@
token: {
...BASE_TX_TOKEN_INFO_SLP_NFT,
tokenId: slpGenesisTxid,
- amount: '1',
+ atoms: 1n,
isMintBaton: false,
},
},
@@ -396,7 +396,7 @@
token: {
...BASE_TX_TOKEN_INFO_SLP_NFT,
tokenId: slpGenesisTxid,
- amount: '99',
+ atoms: 99n,
isMintBaton: false,
},
},
@@ -405,7 +405,7 @@
token: {
...BASE_TX_TOKEN_INFO_SLP_NFT,
tokenId: slpGenesisTxid,
- amount: '900',
+ atoms: 900n,
isMintBaton: false,
},
},
@@ -414,7 +414,7 @@
token: {
...BASE_TX_TOKEN_INFO_SLP_NFT,
tokenId: slpGenesisTxid,
- amount: '4000',
+ atoms: 4000n,
isMintBaton: false,
},
},
@@ -485,12 +485,12 @@
txid: '2c6258bee9033399108e845b3c69e60746b89624b3ec18c5d5cc4b2e88c6ccab',
outIdx: 1,
},
- value: 2000,
+ sats: 2000n,
token: {
...BASE_TX_TOKEN_INFO_SLP_NFT,
tokenId: slpGenesisTxid,
entryIdx: 1,
- amount: '1',
+ atoms: 1n,
isMintBaton: false,
},
},
@@ -500,13 +500,13 @@
expect(slpChildGenesis.outputs).to.deep.equal([
{
...BASE_TX_OUTPUT,
- value: 0,
+ sats: 0n,
outputScript:
'6a04534c500001410747454e455349530d534c50204e4654204348494c4413536c70204e4654204348494c4420746f6b656e4c004c0001004c00080000000000000001',
},
{
...BASE_TX_OUTPUT,
- value: 1400,
+ sats: 1400n,
token: {
...BASE_TX_TOKEN_INFO_SLP_NFT,
tokenId: slpChildGenesisTxid,
@@ -515,7 +515,7 @@
protocol: 'SLP',
type: 'SLP_TOKEN_TYPE_NFT1_CHILD',
},
- amount: '1',
+ atoms: 1n,
isMintBaton: false,
},
},
@@ -571,7 +571,7 @@
).to.eql(true);
// Clone as we will use blockTxs.txs later
- const txsFromBlock = JSON.parse(JSON.stringify(blockTxs.txs));
+ const txsFromBlock = [...blockTxs.txs];
// The first tx is the coinbase tx, which is not a token
const coinbaseTx = txsFromBlock.shift()!;
diff --git a/modules/chronik-client/test/integration/websocket.ts b/modules/chronik-client/test/integration/websocket.ts
--- a/modules/chronik-client/test/integration/websocket.ts
+++ b/modules/chronik-client/test/integration/websocket.ts
@@ -508,7 +508,7 @@
scriptsig: coinbaseScriptsig,
outputs: [
{
- value: coinbaseOutValue,
+ sats: BigInt(coinbaseOutValue),
outputScript: coinbaseOutScriptpubkey,
},
],
@@ -585,7 +585,7 @@
scriptsig: coinbaseScriptsig,
outputs: [
{
- value: coinbaseOutValue,
+ sats: BigInt(coinbaseOutValue),
outputScript: coinbaseOutScriptpubkey,
},
],
@@ -711,7 +711,7 @@
scriptsig: coinbaseScriptsig,
outputs: [
{
- value: coinbaseOutValue,
+ sats: BigInt(coinbaseOutValue),
outputScript: coinbaseOutScriptpubkey,
},
],
@@ -747,7 +747,7 @@
scriptsig: coinbaseScriptsig,
outputs: [
{
- value: coinbaseOutValue,
+ sats: BigInt(coinbaseOutValue),
outputScript: coinbaseOutScriptpubkey,
},
],
diff --git a/modules/chronik-client/tsconfig.json b/modules/chronik-client/tsconfig.json
--- a/modules/chronik-client/tsconfig.json
+++ b/modules/chronik-client/tsconfig.json
@@ -11,7 +11,7 @@
// "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */
/* Language and Environment */
- "target": "es2016" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */,
+ "target": "es2020" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */,
// "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */
// "jsx": "preserve", /* Specify what JSX code is generated. */
// "experimentalDecorators": true, /* Enable experimental support for TC39 stage 2 draft decorators. */
diff --git a/modules/ecash-agora/README.md b/modules/ecash-agora/README.md
--- a/modules/ecash-agora/README.md
+++ b/modules/ecash-agora/README.md
@@ -186,3 +186,7 @@
### 1.0.1
- Do not validate for unspendable offer creation when we calculate fee in `acceptFeeSats()` [D17648](https://reviews.bitcoinabc.org/D17648)
+
+### 2.0.0
+
+- Improve types and shapes in line with chronik proto updates [D17650](https://reviews.bitcoinabc.org/D17650)
diff --git a/modules/ecash-agora/package-lock.json b/modules/ecash-agora/package-lock.json
--- a/modules/ecash-agora/package-lock.json
+++ b/modules/ecash-agora/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "ecash-agora",
- "version": "1.0.1",
+ "version": "2.0.0",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "ecash-agora",
- "version": "1.0.1",
+ "version": "2.0.0",
"license": "MIT",
"dependencies": {
"chronik-client": "file:../chronik-client",
diff --git a/modules/ecash-agora/package.json b/modules/ecash-agora/package.json
--- a/modules/ecash-agora/package.json
+++ b/modules/ecash-agora/package.json
@@ -1,12 +1,12 @@
{
"name": "ecash-agora",
- "version": "1.0.1",
+ "version": "2.0.0",
"description": "Library for interacting with the eCash Agora protocol",
"main": "./dist/index.js",
"scripts": {
"build": "tsc && tsc -p ./tsconfig.build.json",
"test": "mocha --import=tsx ./src/*.test.ts ./src/**/*.test.ts",
- "integration-tests": "mocha --import=tsx ./tests/*.test.ts --timeout 60000",
+ "integration-tests": "mocha --import=tsx ./tests/oneshot.test.ts --timeout 60000",
"coverage": "nyc npm run test",
"junit": "npm run test --reporter mocha-junit-reporter"
},
diff --git a/modules/ecash-agora/src/ad.ts b/modules/ecash-agora/src/ad.ts
--- a/modules/ecash-agora/src/ad.ts
+++ b/modules/ecash-agora/src/ad.ts
@@ -51,7 +51,7 @@
opreturnScript = slpSend(
tokenEntry.tokenId,
tokenEntry.tokenType.number,
- [0, BigInt(offerOutput.token.amount)],
+ [0n, offerOutput.token.atoms],
);
break;
case 'ALP':
@@ -103,7 +103,7 @@
txBuilderInput: {
prevOut: outpoint,
signData: {
- value: offerOutput.value,
+ sats: offerOutput.sats,
redeemScript: expectedAgoraScript,
},
},
diff --git a/modules/ecash-agora/src/agora.ts b/modules/ecash-agora/src/agora.ts
--- a/modules/ecash-agora/src/agora.ts
+++ b/modules/ecash-agora/src/agora.ts
@@ -13,11 +13,10 @@
} from 'chronik-client';
import {
alpSend,
- Amount,
+ Atoms,
Bytes,
DEFAULT_DUST_LIMIT,
DEFAULT_FEE_PER_KB,
- Ecc,
EccDummy,
emppScript,
fromHex,
@@ -79,13 +78,11 @@
/** If an offer is TAKEN */
export interface TakenInfo {
/** satoshis paid in taking an offer */
- satoshisPaid: number;
+ satoshisPaid: bigint;
/**
- * amount of token purchased in base tokens
- * (aka token satoshis, the token qty without
- * decimals applied)
+ * amount of token purchased in atoms aka base tokens
*/
- baseTokens: string;
+ atoms: bigint;
/** taker outputScript as a hex string*/
takerScriptHex: string;
}
@@ -154,7 +151,7 @@
/** For partial offers: Number of accepted tokens */
acceptedTokens?: bigint;
/** Dust amount to use for the token output. */
- dustAmount?: number;
+ dustAmount?: bigint;
/** Fee per kB to use when building the tx. */
feePerKb?: number;
/** Allow accepting an offer such that the remaining quantity is unacceptable */
@@ -169,7 +166,7 @@
fuelInputs: params.fuelInputs,
extraOutputs: [
{
- value: dustAmount,
+ sats: dustAmount,
script: params.recipientScript,
},
params.recipientScript,
@@ -202,7 +199,7 @@
fuelInputs: params.extraInputs ?? [],
extraOutputs: [
{
- value: 0,
+ sats: 0n,
script: params.recipientScript,
},
],
@@ -277,15 +274,15 @@
),
});
txBuild.inputs.push(...params.fuelInputs);
- const sendAmounts: Amount[] = [0];
- const offeredTokens = BigInt(this.token.amount);
+ const sendAmounts: Atoms[] = [0n];
+ const offeredTokens = this.token.atoms;
if (offeredTokens > params.acceptedTokens) {
sendAmounts.push(offeredTokens - params.acceptedTokens);
}
sendAmounts.push(params.acceptedTokens);
if (agoraPartial.tokenProtocol === 'SLP') {
txBuild.outputs.push({
- value: 0,
+ sats: 0n,
script: slpSend(
this.token.tokenId,
this.token.tokenType.number,
@@ -294,7 +291,7 @@
});
} else if (agoraPartial.tokenProtocol === 'ALP') {
txBuild.outputs.push({
- value: 0,
+ sats: 0n,
script: emppScript([
agoraPartial.adPushdata(),
alpSend(
@@ -308,7 +305,7 @@
throw new Error('Not implemented');
}
txBuild.outputs.push({
- value: agoraPartial.askedSats(params.acceptedTokens),
+ sats: agoraPartial.askedSats(params.acceptedTokens),
script: Script.p2pkh(shaRmd160(agoraPartial.makerPk)),
});
if (offeredTokens > params.acceptedTokens) {
@@ -319,7 +316,7 @@
truncFactor,
});
txBuild.outputs.push({
- value: agoraPartial.dustAmount,
+ sats: agoraPartial.dustAmount,
script: Script.p2sh(
shaRmd160(newAgoraPartial.script().bytecode),
),
@@ -358,7 +355,7 @@
/** Script to send canceled tokens and the leftover sats (if any) to. */
recipientScript: Script;
/** Dust amount to use for the token output. */
- dustAmount?: number;
+ dustAmount?: bigint;
/** Fee per kB to use when building the tx. */
feePerKb?: number;
}): Tx {
@@ -369,7 +366,7 @@
fuelInputs: params.fuelInputs,
extraOutputs: [
{
- value: dustAmount,
+ sats: dustAmount,
script: params.recipientScript,
},
params.recipientScript,
@@ -403,7 +400,7 @@
fuelInputs: params.extraInputs ?? [],
extraOutputs: [
{
- value: 0,
+ sats: 0n,
script: params.recipientScript,
},
],
@@ -438,22 +435,22 @@
switch (tokenProtocol) {
case 'SLP':
outputs.push({
- value: 0,
+ sats: 0n,
script: slpSend(
this.token.tokenId,
this.token.tokenType.number,
- [BigInt(this.token.amount)],
+ [this.token.atoms],
),
});
break;
case 'ALP':
outputs.push({
- value: 0,
+ sats: 0n,
script: emppScript([
alpSend(
this.token.tokenId,
this.token.tokenType.number,
- [BigInt(this.token.amount)],
+ [this.token.atoms],
),
]),
});
@@ -533,13 +530,13 @@
export class Agora {
private chronik: ChronikClient;
private plugin: PluginEndpoint;
- private dustAmount: number;
+ private dustAmount: bigint;
/**
* Create an Agora instance. The provided Chronik instance must have the
* "agora" plugin loaded.
**/
- public constructor(chronik: ChronikClient, dustAmount?: number) {
+ public constructor(chronik: ChronikClient, dustAmount?: bigint) {
this.chronik = chronik;
this.plugin = chronik.plugin(PLUGIN_NAME);
this.dustAmount = dustAmount ?? DEFAULT_DUST_LIMIT;
@@ -676,7 +673,7 @@
// The purchase price is satoshis that go to the offer creator
// Index 1 output
- const satoshisPaid = tx.outputs[1].value;
+ const satoshisPaid = tx.outputs[1].sats;
// The taker receives the purchased tokens at a p2pkh address
// This is at index 2 for a buy of the full offer and index 3 for a partial buy
@@ -690,14 +687,14 @@
const takerScriptHex =
tx.outputs[takerBuyIndex].outputScript;
- const baseTokens = tx.outputs[takerBuyIndex].token?.amount;
- if (typeof baseTokens === 'string') {
+ const atoms = tx.outputs[takerBuyIndex].token?.atoms;
+ if (typeof atoms === 'bigint') {
// Should always be true but we may have different kinds of agora
// offers in the future
// So, we only set if we have the info we expect
takenInfo = {
satoshisPaid,
- baseTokens,
+ atoms,
takerScriptHex,
};
}
@@ -708,7 +705,7 @@
outpoint: input.prevOut,
blockHeight: tx.block?.height ?? -1,
isCoinbase: tx.isCoinbase,
- value: input.value,
+ sats: input.sats,
script: input.outputScript!,
isFinal: false,
plugins: input.plugins,
@@ -864,11 +861,11 @@
const outputsSerBytes = new Bytes(fromHex(outputsSerHex));
const enforcedOutputs: TxOutput[] = [
{
- value: BigInt(0),
+ sats: 0n,
script: slpSend(
utxo.token.tokenId,
utxo.token.tokenType.number,
- [0, BigInt(utxo.token.amount)],
+ [0n, utxo.token.atoms],
),
},
];
@@ -897,7 +894,7 @@
txBuilderInput: {
prevOut: utxo.outpoint,
signData: {
- value: utxo.value,
+ sats: utxo.sats,
redeemScript: agoraOneshot.script(),
},
},
@@ -956,8 +953,7 @@
);
const agoraPartial = new AgoraPartial({
- truncTokens:
- BigInt(utxo.token.amount) >> (8n * BigInt(numTokenTruncBytes)),
+ truncTokens: utxo.token.atoms >> (8n * BigInt(numTokenTruncBytes)),
numTokenTruncBytes,
tokenScaleFactor,
scaledTruncTokensPerTruncSat,
@@ -981,7 +977,7 @@
txBuilderInput: {
prevOut: utxo.outpoint,
signData: {
- value: utxo.value,
+ sats: utxo.sats,
redeemScript: agoraPartial.script(),
},
},
diff --git a/modules/ecash-agora/src/inputs.test.ts b/modules/ecash-agora/src/inputs.test.ts
--- a/modules/ecash-agora/src/inputs.test.ts
+++ b/modules/ecash-agora/src/inputs.test.ts
@@ -22,7 +22,7 @@
// CACHET candle created by Agora Partial Alpha
// Created by approx params offering 100, min 0.1, 10,000 XEC per CACHET
const agoraPartialCachetAlphaOne = new AgoraPartial({
- dustAmount: 546,
+ dustAmount: 546n,
enforcedLockTime: 1040365320,
minAcceptedScaledTruncTokens: 2147470n,
numSatsTruncBytes: 1,
@@ -43,7 +43,7 @@
},
status: 'OPEN',
token: {
- amount: '10000',
+ atoms: 10000n,
isMintBaton: false,
tokenId:
'aed861a31b96934b88c0252ede135cb9700d7649f69191235087a3030e553cb1',
@@ -60,7 +60,7 @@
},
signData: {
redeemScript: agoraPartialCachetAlphaOne.script(),
- value: 546,
+ sats: 546n,
},
},
variant: {
@@ -73,7 +73,7 @@
const heismanNftOne = new AgoraOneshot({
enforcedOutputs: [
{
- value: 0n,
+ sats: 0n,
script: new Script(
fromHex(
'6a04534c500001410453454e4420be095430a16a024134bea079f235bcd2f79425c42659f9346416f626671f371c080000000000000000080000000000000001',
@@ -81,7 +81,7 @@
),
},
{
- value: 5000000000n,
+ sats: 5000000000n,
script: new Script(
fromHex('76a91495e79f51d4260bc0dc3ba7fb77c7be92d0fbdd1d88ac'),
),
@@ -102,10 +102,10 @@
type: 'SLP_TOKEN_TYPE_NFT1_CHILD',
number: 65,
} as TokenType,
- amount: '1',
+ atoms: 1n,
isMintBaton: false,
},
- value: 546,
+ sats: 546n,
};
export const heismanNftOneOffer = new AgoraOffer({
variant: {
@@ -117,7 +117,7 @@
txBuilderInput: {
prevOut: heismanNftOneUtxo.outpoint,
signData: {
- value: heismanNftOneUtxo.value,
+ sats: heismanNftOneUtxo.sats,
redeemScript: heismanNftOne.script(),
},
},
@@ -191,10 +191,10 @@
// acceptFeeSats 1182n
// askedSats 5460736n
// requiredSats 5461918n
- utxos: [{ value: 5461918 }] as unknown as ScriptUtxo[],
+ utxos: [{ sats: 5461918n }] as unknown as ScriptUtxo[],
acceptedTokens: 546n,
feePerKb: 1000,
- returned: [{ value: 5461918 }] as unknown as ScriptUtxo[],
+ returned: [{ sats: 5461918n }] as unknown as ScriptUtxo[],
},
{
description:
@@ -203,10 +203,10 @@
// acceptFeeSats 2376n
// askedSats 5460736n
// requiredSats 5463112
- utxos: [{ value: 5463112 }] as unknown as ScriptUtxo[],
+ utxos: [{ sats: 5463112n }] as unknown as ScriptUtxo[],
acceptedTokens: 546n,
feePerKb: 2010,
- returned: [{ value: 5463112 }] as unknown as ScriptUtxo[],
+ returned: [{ sats: 5463112n }] as unknown as ScriptUtxo[],
},
{
description: 'Two inputs exactly covering the price + fee',
@@ -215,14 +215,14 @@
// askedSats 5460736n
// requiredSats 5462059n
utxos: [
- { value: 5461917 },
- { value: 142 },
+ { sats: 5461917n },
+ { sats: 142n },
] as unknown as ScriptUtxo[],
acceptedTokens: 546n,
feePerKb: 1000,
returned: [
- { value: 5461917 },
- { value: 142 },
+ { sats: 5461917n },
+ { sats: 142n },
] as unknown as ScriptUtxo[],
},
{
@@ -232,16 +232,16 @@
// askedSats 5460736n
// requiredSats 5462059n
utxos: [
- { value: 5461917 },
- { value: 141 },
- { value: 142 },
+ { sats: 5461917n },
+ { sats: 141n },
+ { sats: 142n },
] as unknown as ScriptUtxo[],
acceptedTokens: 546n,
feePerKb: 1000,
returned: [
- { value: 5461917 },
- { value: 141 },
- { value: 142 },
+ { sats: 5461917n },
+ { sats: 141n },
+ { sats: 142n },
] as unknown as ScriptUtxo[],
},
],
@@ -250,7 +250,7 @@
description:
'We throw an error if available utxos can only cover 1 satoshi less than price + fee',
agoraOffer: agoraOfferCachetAlphaOne,
- utxos: [{ value: 5461917 }] as unknown as ScriptUtxo[],
+ utxos: [{ sats: 5461917n }] as unknown as ScriptUtxo[],
acceptedTokens: 546n,
feePerKb: 1000,
error: 'Insufficient utxos to accept this offer',
@@ -266,9 +266,9 @@
// acceptFeeSats 740n
// askedSats 5000000000n
// requiredSats 5000000740n
- utxos: [{ value: 5000000740 }] as unknown as ScriptUtxo[],
+ utxos: [{ sats: 5000000740n }] as unknown as ScriptUtxo[],
feePerKb: 1000,
- returned: [{ value: 5000000740 }] as unknown as ScriptUtxo[],
+ returned: [{ sats: 5000000740n }] as unknown as ScriptUtxo[],
},
{
description:
@@ -277,9 +277,9 @@
// acceptFeeSats 1488n
// askedSats 5000000000n
// requiredSats 5000001488n
- utxos: [{ value: 5000001488 }] as unknown as ScriptUtxo[],
+ utxos: [{ sats: 5000001488n }] as unknown as ScriptUtxo[],
feePerKb: 2010,
- returned: [{ value: 5000001488 }] as unknown as ScriptUtxo[],
+ returned: [{ sats: 5000001488n }] as unknown as ScriptUtxo[],
},
{
description: 'Two inputs exactly covering the price + fee',
@@ -288,13 +288,13 @@
// askedSats 5000000000n
// requiredSats 5000000740n
utxos: [
- { value: 5000000739 },
- { value: 142 },
+ { sats: 5000000739n },
+ { sats: 142n },
] as unknown as ScriptUtxo[],
feePerKb: 1000,
returned: [
- { value: 5000000739 },
- { value: 142 },
+ { sats: 5000000739n },
+ { sats: 142n },
] as unknown as ScriptUtxo[],
},
{
@@ -304,15 +304,15 @@
// askedSats 5000000000n
// requiredSats 5000000881n
utxos: [
- { value: 5000000739 },
- { value: 141 },
- { value: 142 },
+ { sats: 5000000739n },
+ { sats: 141n },
+ { sats: 142n },
] as unknown as ScriptUtxo[],
feePerKb: 1000,
returned: [
- { value: 5000000739 },
- { value: 141 },
- { value: 142 },
+ { sats: 5000000739n },
+ { sats: 141n },
+ { sats: 142n },
] as unknown as ScriptUtxo[],
},
],
@@ -321,7 +321,7 @@
description:
'We throw an error if available utxos can only cover 1 satoshi less than price + fee',
oneshotOffer: heismanNftOneOffer,
- utxos: [{ value: 5000000739 }] as unknown as ScriptUtxo[],
+ utxos: [{ sats: 5000000739n }] as unknown as ScriptUtxo[],
feePerKb: 1000,
error: 'Insufficient utxos to accept this offer',
},
@@ -334,31 +334,31 @@
'We can get a single fuel input to cancel the offer, if the wallet has one exactly covering the fee',
agoraOffer: agoraOfferCachetAlphaOne,
// cancelFeeSats 719n
- utxos: [{ value: 719 }] as unknown as ScriptUtxo[],
+ utxos: [{ sats: 719n }] as unknown as ScriptUtxo[],
feePerKb: 1000,
- returned: [{ value: 719 }] as unknown as ScriptUtxo[],
+ returned: [{ sats: 719n }] as unknown as ScriptUtxo[],
},
{
description:
'We can get a single fuel input to cancel the offer, if the wallet has one exactly covering the fee, at a higher than min fee',
agoraOffer: agoraOfferCachetAlphaOne,
// cancelFeeSats 1446
- utxos: [{ value: 1446 }] as unknown as ScriptUtxo[],
+ utxos: [{ sats: 1446n }] as unknown as ScriptUtxo[],
feePerKb: 2010,
- returned: [{ value: 1446 }] as unknown as ScriptUtxo[],
+ returned: [{ sats: 1446n }] as unknown as ScriptUtxo[],
},
{
description: 'Two inputs exactly covering the fee',
agoraOffer: agoraOfferCachetAlphaOne,
// cancelFeeSats 860n
utxos: [
- { value: 718 },
- { value: 142 },
+ { sats: 718n },
+ { sats: 142n },
] as unknown as ScriptUtxo[],
feePerKb: 1000,
returned: [
- { value: 718 },
- { value: 142 },
+ { sats: 718n },
+ { sats: 142n },
] as unknown as ScriptUtxo[],
},
{
@@ -366,15 +366,15 @@
agoraOffer: agoraOfferCachetAlphaOne,
// cancelFeeSats 1001n
utxos: [
- { value: 718 },
- { value: 141 },
- { value: 142 },
+ { sats: 718n },
+ { sats: 141n },
+ { sats: 142n },
] as unknown as ScriptUtxo[],
feePerKb: 1000,
returned: [
- { value: 718 },
- { value: 141 },
- { value: 142 },
+ { sats: 718n },
+ { sats: 141n },
+ { sats: 142n },
] as unknown as ScriptUtxo[],
},
// ONESHOT cases
@@ -383,18 +383,18 @@
'ONESHOT: We can get a single fuel input to cancel the offer, if the wallet has one exactly covering the price + fee',
agoraOffer: heismanNftOneOffer,
// cancelFeeSats 535
- utxos: [{ value: 535 }] as unknown as ScriptUtxo[],
+ utxos: [{ sats: 535n }] as unknown as ScriptUtxo[],
feePerKb: 1000,
- returned: [{ value: 535 }] as unknown as ScriptUtxo[],
+ returned: [{ sats: 535n }] as unknown as ScriptUtxo[],
},
{
description:
'ONESHOT: We can get a single fuel input to cancel the offer, if the wallet has one exactly covering the price + fee, at a higher than min fee',
agoraOffer: heismanNftOneOffer,
// cancelFeeSats 1076
- utxos: [{ value: 1076 }] as unknown as ScriptUtxo[],
+ utxos: [{ sats: 1076n }] as unknown as ScriptUtxo[],
feePerKb: 2010,
- returned: [{ value: 1076 }] as unknown as ScriptUtxo[],
+ returned: [{ sats: 1076n }] as unknown as ScriptUtxo[],
},
{
description:
@@ -402,13 +402,13 @@
agoraOffer: heismanNftOneOffer,
// cancelFeeSats 676
utxos: [
- { value: 534 },
- { value: 142 },
+ { sats: 534n },
+ { sats: 142n },
] as unknown as ScriptUtxo[],
feePerKb: 1000,
returned: [
- { value: 534 },
- { value: 142 },
+ { sats: 534n },
+ { sats: 142n },
] as unknown as ScriptUtxo[],
},
{
@@ -417,15 +417,15 @@
agoraOffer: heismanNftOneOffer,
// cancelFeeSats 817
utxos: [
- { value: 534 },
- { value: 141 },
- { value: 142 },
+ { sats: 534n },
+ { sats: 141n },
+ { sats: 142n },
] as unknown as ScriptUtxo[],
feePerKb: 1000,
returned: [
- { value: 534 },
- { value: 141 },
- { value: 142 },
+ { sats: 534n },
+ { sats: 141n },
+ { sats: 142n },
] as unknown as ScriptUtxo[],
},
],
@@ -434,7 +434,7 @@
description:
'We throw an error if available utxos can only cover 1 satoshi less than fee',
agoraOffer: agoraOfferCachetAlphaOne,
- utxos: [{ value: 718 }] as unknown as ScriptUtxo[],
+ utxos: [{ sats: 718n }] as unknown as ScriptUtxo[],
feePerKb: 1000,
error: 'Insufficient utxos to cancel this offer',
},
@@ -444,7 +444,7 @@
agoraOffer: heismanNftOneOffer,
utxos: [
{
- value: 534,
+ sats: 534n,
},
] as unknown as ScriptUtxo[],
feePerKb: 1000,
diff --git a/modules/ecash-agora/src/inputs.ts b/modules/ecash-agora/src/inputs.ts
--- a/modules/ecash-agora/src/inputs.ts
+++ b/modules/ecash-agora/src/inputs.ts
@@ -16,7 +16,7 @@
const DUMMY_TXID =
'1111111111111111111111111111111111111111111111111111111111111111';
const DUMMY_WALLET_HASH = fromHex('12'.repeat(20));
-const DUMMY_SUFFICIENT_CANCEL_VALUE = 10000;
+const DUMMY_SUFFICIENT_CANCEL_VALUE = 10000n;
const DUMMY_SCRIPT = Script.p2pkh(DUMMY_WALLET_HASH);
export const DUMMY_KEYPAIR = {
sk: fromHex('33'.repeat(32)),
@@ -33,7 +33,7 @@
outIdx: 1,
},
signData: {
- value: DUMMY_SUFFICIENT_CANCEL_VALUE,
+ sats: DUMMY_SUFFICIENT_CANCEL_VALUE,
outputScript: DUMMY_SCRIPT,
},
},
@@ -57,7 +57,7 @@
fuelInputs.push(utxo);
// Match our fuelInput count with dummyInputs
dummyInputs.push(DUMMY_INPUT);
- inputSatoshis += BigInt(utxo.value);
+ inputSatoshis += utxo.sats;
const askedSats = agoraOffer.askedSats(BigInt(acceptedTokens));
@@ -98,7 +98,7 @@
fuelInputs.push(utxo);
// Match our fuelInput count with dummyInputs
dummyInputs.push(DUMMY_INPUT);
- inputSatoshis += BigInt(utxo.value);
+ inputSatoshis += utxo.sats;
const askedSats = agoraOffer.askedSats();
@@ -135,7 +135,7 @@
fuelInputs.push(utxo);
// Match our fuelInput count with dummyInputs
dummyInputs.push(DUMMY_INPUT);
- inputSatoshis += BigInt(utxo.value);
+ inputSatoshis += utxo.sats;
// Get the tx fee for this tx
// In practice, this is always bigger than dust
diff --git a/modules/ecash-agora/src/oneshot.ts b/modules/ecash-agora/src/oneshot.ts
--- a/modules/ecash-agora/src/oneshot.ts
+++ b/modules/ecash-agora/src/oneshot.ts
@@ -181,7 +181,7 @@
const outputsSerBytes = new Bytes(outputsSerOp.data);
const enforcedOutputs: TxOutput[] = [
{
- value: BigInt(0),
+ sats: 0n,
script: opreturnScript,
},
];
@@ -220,7 +220,7 @@
public askedSats(): bigint {
return this.enforcedOutputs.reduce(
- (prev, output) => prev + BigInt(output.value),
+ (prev, output) => prev + output.sats,
0n,
);
}
diff --git a/modules/ecash-agora/src/partial.approx.test.ts b/modules/ecash-agora/src/partial.approx.test.ts
--- a/modules/ecash-agora/src/partial.approx.test.ts
+++ b/modules/ecash-agora/src/partial.approx.test.ts
@@ -84,7 +84,7 @@
priceNanoSatsPerToken: 1000000n,
minAcceptedTokens: 1000n,
...BASE_PARAMS_SLP,
- dustAmount: 1,
+ dustAmount: 1n,
});
expect(params).to.deep.equal(
new AgoraPartial({
@@ -95,7 +95,7 @@
numSatsTruncBytes: 0,
minAcceptedScaledTruncTokens: 1073742000n,
...BASE_PARAMS_SLP,
- dustAmount: 1,
+ dustAmount: 1n,
scriptLen: params.scriptLen,
}),
);
@@ -331,7 +331,7 @@
priceNanoSatsPerToken: 1000000n,
minAcceptedTokens: 1000n,
...BASE_PARAMS_SLP,
- dustAmount: 1,
+ dustAmount: 1n,
});
expect(params).to.deep.equal(
new AgoraPartial({
@@ -342,7 +342,7 @@
numSatsTruncBytes: 0,
minAcceptedScaledTruncTokens: 2145000n,
...BASE_PARAMS_SLP,
- dustAmount: 1,
+ dustAmount: 1n,
scriptLen: params.scriptLen,
}),
);
diff --git a/modules/ecash-agora/src/partial.ts b/modules/ecash-agora/src/partial.ts
--- a/modules/ecash-agora/src/partial.ts
+++ b/modules/ecash-agora/src/partial.ts
@@ -131,7 +131,7 @@
**/
enforcedLockTime: number;
/** Dust amount to be used by the script. */
- dustAmount?: number;
+ dustAmount?: bigint;
/**
* Minimum tokenScaleFactor when approximating numTokenTruncBytes.
* It is recommended to leave this at the default (1000), but it is exposed
@@ -268,7 +268,7 @@
* Dust amount of the network, the Script will enforce token outputs to have
* this amount.
**/
- public dustAmount: number;
+ public dustAmount: bigint;
public constructor(params: {
truncTokens: bigint;
@@ -283,7 +283,7 @@
tokenProtocol: 'SLP' | 'ALP';
scriptLen: number;
enforcedLockTime: number;
- dustAmount: number;
+ dustAmount: bigint;
}) {
this.truncTokens = params.truncTokens;
this.numTokenTruncBytes = params.numTokenTruncBytes;
@@ -648,7 +648,7 @@
// protocol intros.
if (this.tokenProtocol === 'SLP') {
const slpSendIntro = slpSend(this.tokenId, this.tokenType, [
- 0,
+ 0n,
]).bytecode;
const covenantConstsWriter = new WriterBytes(
slpSendIntro.length + adPushdata.length,
diff --git a/modules/ecash-agora/tests/oneshot.test.ts b/modules/ecash-agora/tests/oneshot.test.ts
--- a/modules/ecash-agora/tests/oneshot.test.ts
+++ b/modules/ecash-agora/tests/oneshot.test.ts
@@ -36,7 +36,7 @@
use(chaiAsPromised);
const NUM_COINS = 500;
-const COIN_VALUE = 100000;
+const COIN_VALUE = 100000n;
const SLP_TOKEN_TYPE_NFT1_GROUP = {
number: 0x81,
@@ -92,7 +92,7 @@
const buyerPkh = shaRmd160(buyerPk);
const buyerP2pkh = Script.p2pkh(buyerPkh);
- await runner.sendToScript(50000, sellerP2pkh);
+ await runner.sendToScript(50000n, sellerP2pkh);
const utxos = await chronik.script('p2pkh', toHex(sellerPkh)).utxos();
expect(utxos.utxos.length).to.equal(1);
@@ -105,7 +105,7 @@
input: {
prevOut: utxo.outpoint,
signData: {
- value: utxo.value,
+ sats: utxo.sats,
outputScript: sellerP2pkh,
},
},
@@ -114,17 +114,17 @@
],
outputs: [
{
- value: 0,
+ sats: 0n,
script: slpGenesis(
SLP_NFT1_GROUP,
{
tokenTicker: 'SLP NFT1 GROUP TOKEN',
decimals: 4,
},
- 1,
+ 1n,
),
},
- { value: 10000, script: sellerP2pkh },
+ { sats: 10000n, script: sellerP2pkh },
],
});
const genesisTx = txBuildGenesisGroup.sign();
@@ -154,7 +154,7 @@
outIdx: 1,
},
signData: {
- value: 10000,
+ sats: 10000n,
outputScript: sellerP2pkh,
},
},
@@ -163,17 +163,17 @@
],
outputs: [
{
- value: 0,
+ sats: 0n,
script: slpGenesis(
SLP_NFT1_CHILD,
{
tokenTicker: 'SLP NFT1 CHILD TOKEN',
decimals: 0,
},
- 1,
+ 1n,
),
},
- { value: 8000, script: sellerP2pkh },
+ { sats: 8000n, script: sellerP2pkh },
],
});
const genesisChildTx = txBuildGenesisChild.sign();
@@ -214,10 +214,10 @@
// covenant that asks for 80000 sats
const enforcedOutputs: TxOutput[] = [
{
- value: BigInt(0),
- script: slpSend(childTokenId, SLP_NFT1_CHILD, [0, 1]),
+ sats: BigInt(0),
+ script: slpSend(childTokenId, SLP_NFT1_CHILD, [0n, 1n]),
},
- { value: BigInt(80000), script: sellerP2pkh },
+ { sats: BigInt(80000), script: sellerP2pkh },
];
const agoraOneshot = new AgoraOneshot({
enforcedOutputs,
@@ -234,7 +234,7 @@
outIdx: 1,
},
signData: {
- value: 8000,
+ sats: 8000n,
outputScript: sellerP2pkh,
},
},
@@ -243,10 +243,10 @@
],
outputs: [
{
- value: 0,
- script: slpSend(childTokenId, SLP_NFT1_CHILD, [1]),
+ sats: 0n,
+ script: slpSend(childTokenId, SLP_NFT1_CHILD, [1n]),
},
- { value: 7000, script: agoraAdP2sh },
+ { sats: 7000n, script: agoraAdP2sh },
],
});
const adSetupTx = txBuildAdSetup.sign();
@@ -264,7 +264,7 @@
outIdx: 1,
},
signData: {
- value: 7000,
+ sats: 7000n,
redeemScript: agoraAdScript,
},
},
@@ -273,10 +273,10 @@
],
outputs: [
{
- value: 0,
- script: slpSend(childTokenId, SLP_NFT1_CHILD, [1]),
+ sats: 0n,
+ script: slpSend(childTokenId, SLP_NFT1_CHILD, [1n]),
},
- { value: 546, script: agoraP2sh },
+ { sats: 546n, script: agoraP2sh },
],
});
const offerTx = txBuildOffer.sign();
@@ -290,7 +290,7 @@
prevOut: offerOutpoint,
signData: {
redeemScript: agoraScript,
- value: 546,
+ sats: 546n,
},
};
@@ -305,7 +305,7 @@
token: {
tokenId: childTokenId,
tokenType: SLP_TOKEN_TYPE_NFT1_CHILD,
- amount: '1',
+ atoms: 1n,
isMintBaton: false,
},
status: 'OPEN',
@@ -369,7 +369,7 @@
});
// 6. Buyer attempts to buy the NFT using 79999 sats, which is rejected
- const buyerSatsTxid = await runner.sendToScript(90000, buyerP2pkh);
+ const buyerSatsTxid = await runner.sendToScript(90000n, buyerP2pkh);
const txBuildAcceptFail = new TxBuilder({
version: 2,
inputs: [
@@ -388,7 +388,7 @@
outIdx: 0,
},
signData: {
- value: 90000,
+ sats: 90000n,
outputScript: buyerP2pkh,
},
},
@@ -397,12 +397,12 @@
],
outputs: [
{
- value: 0,
- script: slpSend(childTokenId, SLP_NFT1_CHILD, [0, 1]),
+ sats: 0n,
+ script: slpSend(childTokenId, SLP_NFT1_CHILD, [0n, 1n]),
},
// failure: one sat missing
- { value: 79999, script: sellerP2pkh },
- { value: 546, script: buyerP2pkh },
+ { sats: 79999n, script: sellerP2pkh },
+ { sats: 546n, script: buyerP2pkh },
],
});
@@ -415,10 +415,10 @@
// with a new advertisement
const newEnforcedOutputs: TxOutput[] = [
{
- value: BigInt(0),
- script: slpSend(childTokenId, SLP_NFT1_CHILD, [0, 1]),
+ sats: BigInt(0),
+ script: slpSend(childTokenId, SLP_NFT1_CHILD, [0n, 1n]),
},
- { value: BigInt(70000), script: sellerP2pkh },
+ { sats: BigInt(70000), script: sellerP2pkh },
];
const newAgoraOneshot = new AgoraOneshot({
enforcedOutputs: newEnforcedOutputs,
@@ -427,7 +427,7 @@
const newAgoraScript = newAgoraOneshot.script();
const newAgoraP2sh = Script.p2sh(shaRmd160(newAgoraScript.bytecode));
const newAgoraAdScript = newAgoraOneshot.adScript();
- const cancelFeeSats = 600;
+ const cancelFeeSats = 600n;
const newAdSetupTxid = await runner.sendToScript(
cancelFeeSats,
Script.p2sh(shaRmd160(newAgoraAdScript.bytecode)),
@@ -441,7 +441,7 @@
outIdx: 0,
},
signData: {
- value: cancelFeeSats,
+ sats: cancelFeeSats,
redeemScript: newAgoraAdScript,
},
},
@@ -469,7 +469,7 @@
prevOut: newOfferOutpoint,
signData: {
redeemScript: newAgoraScript,
- value: 546,
+ sats: 546n,
},
};
@@ -572,7 +572,7 @@
token: {
tokenId: childTokenId,
tokenType: SLP_TOKEN_TYPE_NFT1_CHILD,
- amount: '1',
+ atoms: 1n,
isMintBaton: false,
},
status: 'OPEN',
@@ -620,8 +620,8 @@
// 9. Buyer successfully accepts advertized NFT offer for 70000 sats
const offer2 = (await agora.activeOffersByTokenId(childTokenId))[0];
expect(offer2.askedSats()).to.equal(70000n);
- const acceptFeeSats = 740;
- const acceptSats = acceptFeeSats + Number(offer2.askedSats());
+ const acceptFeeSats = 740n;
+ const acceptSats = acceptFeeSats + offer2.askedSats();
const acceptSatsTxid = await runner.sendToScript(
acceptSats,
buyerP2pkh,
@@ -633,7 +633,7 @@
outIdx: 0,
},
signData: {
- value: acceptSats,
+ sats: acceptSats,
outputScript: buyerP2pkh,
},
},
@@ -692,10 +692,10 @@
{
...newExpectedOffer,
takenInfo: {
- satoshisPaid: 70000,
+ satoshisPaid: 70000n,
takerScriptHex:
'76a914531260aa2a199e228c537dfa42c82bea2c7c1f4d88ac',
- baseTokens: '1',
+ atoms: 1n,
},
status: 'TAKEN',
},
diff --git a/modules/ecash-agora/tests/partial-helper-alp.ts b/modules/ecash-agora/tests/partial-helper-alp.ts
--- a/modules/ecash-agora/tests/partial-helper-alp.ts
+++ b/modules/ecash-agora/tests/partial-helper-alp.ts
@@ -7,7 +7,7 @@
ALL_BIP143,
alpGenesis,
alpSend,
- Amount,
+ Atoms,
Ecc,
emppScript,
P2PKHSignatory,
@@ -25,7 +25,7 @@
export function makeAlpGenesis(params: {
tokenType: number;
fuelInput: TxBuilderInput;
- tokenAmounts: Amount[];
+ tokenAmounts: Atoms[];
extraOutputs: TxBuilderOutput[];
}) {
const { tokenType, fuelInput } = params;
@@ -33,7 +33,7 @@
inputs: [fuelInput],
outputs: [
{
- value: 0,
+ sats: 0n,
script: emppScript([
alpGenesis(
tokenType,
@@ -65,12 +65,12 @@
const makerPkh = shaRmd160(makerPk);
const makerP2pkh = Script.p2pkh(makerPkh);
- const genesisOutputSats = 2000;
+ const genesisOutputSats = 2000n;
const genesisTx = makeAlpGenesis({
tokenType: agoraPartial.tokenType,
fuelInput,
tokenAmounts: [agoraPartial.offeredTokens()],
- extraOutputs: [{ value: genesisOutputSats, script: makerP2pkh }],
+ extraOutputs: [{ sats: genesisOutputSats, script: makerP2pkh }],
});
const genesisTxid = (await chronik.broadcastTx(genesisTx.ser())).txid;
const tokenId = genesisTxid;
@@ -91,7 +91,7 @@
outIdx: 1,
},
signData: {
- value: genesisOutputSats,
+ sats: genesisOutputSats,
outputScript: makerP2pkh,
},
},
@@ -100,7 +100,7 @@
],
outputs: [
{
- value: 0,
+ sats: 0n,
script: emppScript([
agoraPartial.adPushdata(),
alpSend(tokenId, agoraPartial.tokenType, [
@@ -108,7 +108,7 @@
]),
]),
},
- { value: 546, script: agoraP2sh },
+ { sats: 546n, script: agoraP2sh },
],
});
const offerTx = txBuildOffer.sign();
diff --git a/modules/ecash-agora/tests/partial-helper-slp.ts b/modules/ecash-agora/tests/partial-helper-slp.ts
--- a/modules/ecash-agora/tests/partial-helper-slp.ts
+++ b/modules/ecash-agora/tests/partial-helper-slp.ts
@@ -30,12 +30,12 @@
const makerPkh = shaRmd160(makerPk);
const makerP2pkh = Script.p2pkh(makerPkh);
- const genesisOutputSats = 2000;
+ const genesisOutputSats = 2000n;
const txBuildGenesisGroup = new TxBuilder({
inputs: [fuelInput],
outputs: [
{
- value: 0,
+ sats: 0n,
script: slpGenesis(
agoraPartial.tokenType,
{
@@ -45,7 +45,7 @@
agoraPartial.offeredTokens(),
),
},
- { value: genesisOutputSats, script: makerP2pkh },
+ { sats: genesisOutputSats, script: makerP2pkh },
],
});
const genesisTx = txBuildGenesisGroup.sign();
@@ -69,7 +69,7 @@
outIdx: 1,
},
signData: {
- value: genesisOutputSats,
+ sats: genesisOutputSats,
outputScript: makerP2pkh,
},
},
@@ -78,12 +78,12 @@
],
outputs: [
{
- value: 0,
+ sats: 0n,
script: slpSend(tokenId, agoraPartial.tokenType, [
agoraPartial.offeredTokens(),
]),
},
- { value: adSetupSats, script: agoraAdP2sh },
+ { sats: adSetupSats, script: agoraAdP2sh },
],
});
const adSetupTx = txBuildAdSetup.sign();
@@ -100,7 +100,7 @@
outIdx: 1,
},
signData: {
- value: adSetupSats,
+ sats: adSetupSats,
redeemScript: agoraAdScript,
},
},
@@ -109,12 +109,12 @@
],
outputs: [
{
- value: 0,
+ sats: 0n,
script: slpSend(tokenId, agoraPartial.tokenType, [
agoraPartial.offeredTokens(),
]),
},
- { value: 546, script: agoraP2sh },
+ { sats: 546n, script: agoraP2sh },
],
});
const offerTx = txBuildOffer.sign();
diff --git a/modules/ecash-agora/tests/partial.alp.bigsats.test.ts b/modules/ecash-agora/tests/partial.alp.bigsats.test.ts
--- a/modules/ecash-agora/tests/partial.alp.bigsats.test.ts
+++ b/modules/ecash-agora/tests/partial.alp.bigsats.test.ts
@@ -34,7 +34,7 @@
dustAmount: DEFAULT_DUST_LIMIT,
};
-const BIGSATS = 149 * 5000000000 - 20000;
+const BIGSATS = BigInt(149 * 5000000000 - 20000);
const ecc = new Ecc();
@@ -51,17 +51,17 @@
async function makeBuilderInputs(
runner: TestRunner,
- values: number[],
+ values: bigint[],
): Promise<TxBuilderInput[]> {
const txid = await runner.sendToScript(values, makerScript);
- return values.map((value, outIdx) => ({
+ return values.map((sats, outIdx) => ({
input: {
prevOut: {
txid,
outIdx,
},
signData: {
- value,
+ sats,
outputScript: makerScript,
},
},
@@ -76,7 +76,7 @@
before(async () => {
runner = await TestRunner.setup('setup_scripts/ecash-agora_base');
chronik = runner.chronik;
- await runner.setupCoins(1, BIGSATS + 11000);
+ await runner.setupCoins(1, BIGSATS + 11000n);
});
after(() => {
@@ -85,7 +85,7 @@
it('AgoraPartial ALP 7450M XEC vs 2p48-1 full accept', async () => {
const [fuelInput, takerInput] = await makeBuilderInputs(runner, [
- 10000,
+ 10000n,
BIGSATS,
]);
@@ -144,23 +144,21 @@
emppScript([
agoraPartial.adPushdata(),
alpSend(agoraPartial.tokenId, agoraPartial.tokenType, [
- 0,
+ 0n,
agoraPartial.offeredTokens(),
]),
]).bytecode,
),
);
- expect(acceptTx.outputs[0].value).to.equal(0);
+ expect(acceptTx.outputs[0].sats).to.equal(0n);
expect(acceptTx.outputs[0].token).to.equal(undefined);
// 1st output is sats to maker
expect(acceptTx.outputs[1].token).to.equal(undefined);
- expect(acceptTx.outputs[1].value).to.equal(734936694784);
+ expect(acceptTx.outputs[1].sats).to.equal(734936694784n);
expect(acceptTx.outputs[1].outputScript).to.equal(makerScriptHex);
// 2nd output is tokens to taker
- expect(acceptTx.outputs[2].token?.amount).to.equal(
- 0xffffff000000n.toString(),
- );
- expect(acceptTx.outputs[2].value).to.equal(DEFAULT_DUST_LIMIT);
+ expect(acceptTx.outputs[2].token?.atoms).to.equal(0xffffff000000n);
+ expect(acceptTx.outputs[2].sats).to.equal(DEFAULT_DUST_LIMIT);
expect(acceptTx.outputs[2].outputScript).to.equal(takerScriptHex);
});
});
@@ -168,12 +166,11 @@
describe('AgoraPartial 7450M XEC vs 2p48-1 small accept', () => {
let runner: TestRunner;
let chronik: ChronikClient;
- let ecc: Ecc;
before(async () => {
runner = await TestRunner.setup('setup_scripts/ecash-agora_base');
chronik = runner.chronik;
- await runner.setupCoins(1, BIGSATS + 11000);
+ await runner.setupCoins(1, BIGSATS + 11000n);
});
after(() => {
@@ -182,7 +179,7 @@
it('AgoraPartial ALP 7450M XEC vs 2p48-1 small accept', async () => {
const [fuelInput, takerInput] = await makeBuilderInputs(runner, [
- 10000,
+ 10000n,
BIGSATS,
]);
@@ -244,30 +241,28 @@
emppScript([
agoraPartial.adPushdata(),
alpSend(agoraPartial.tokenId, agoraPartial.tokenType, [
- 0,
+ 0n,
agoraPartial.offeredTokens() - acceptedTokens,
acceptedTokens,
]),
]).bytecode,
),
);
- expect(acceptTx.outputs[0].value).to.equal(0);
+ expect(acceptTx.outputs[0].sats).to.equal(0n);
expect(acceptTx.outputs[0].token).to.equal(undefined);
// 1st output is sats to maker
expect(acceptTx.outputs[1].token).to.equal(undefined);
- expect(acceptTx.outputs[1].value).to.equal(549755813888);
+ expect(acceptTx.outputs[1].sats).to.equal(549755813888n);
expect(acceptTx.outputs[1].outputScript).to.equal(makerScriptHex);
// 2nd output is back to the P2SH Script
- expect(acceptTx.outputs[2].token?.amount).to.equal(
- (agoraPartial.offeredTokens() - acceptedTokens).toString(),
+ expect(acceptTx.outputs[2].token?.atoms).to.equal(
+ agoraPartial.offeredTokens() - acceptedTokens,
);
- expect(acceptTx.outputs[2].value).to.equal(DEFAULT_DUST_LIMIT);
+ expect(acceptTx.outputs[2].sats).to.equal(DEFAULT_DUST_LIMIT);
expect(acceptTx.outputs[2].outputScript.slice(0, 4)).to.equal('a914');
// 3rd output is tokens to taker
- expect(acceptTx.outputs[3].token?.amount).to.equal(
- acceptedTokens.toString(),
- );
- expect(acceptTx.outputs[3].value).to.equal(DEFAULT_DUST_LIMIT);
+ expect(acceptTx.outputs[3].token?.atoms).to.equal(acceptedTokens);
+ expect(acceptTx.outputs[3].sats).to.equal(DEFAULT_DUST_LIMIT);
expect(acceptTx.outputs[3].outputScript).to.equal(takerScriptHex);
});
});
@@ -279,7 +274,7 @@
before(async () => {
runner = await TestRunner.setup('setup_scripts/ecash-agora_base');
chronik = runner.chronik;
- await runner.setupCoins(1, BIGSATS + 11000);
+ await runner.setupCoins(1, BIGSATS + 11000n);
});
after(() => {
@@ -288,7 +283,7 @@
it('AgoraPartial ALP 7450M XEC vs 2p47-1 full accept', async () => {
const [fuelInput, takerInput] = await makeBuilderInputs(runner, [
- 10000,
+ 10000n,
BIGSATS,
]);
@@ -347,23 +342,23 @@
emppScript([
agoraPartial.adPushdata(),
alpSend(agoraPartial.tokenId, agoraPartial.tokenType, [
- 0,
+ 0n,
agoraPartial.offeredTokens(),
]),
]).bytecode,
),
);
- expect(acceptTx.outputs[0].value).to.equal(0);
+ expect(acceptTx.outputs[0].sats).to.equal(0n);
expect(acceptTx.outputs[0].token).to.equal(undefined);
// 1st output is sats to maker
expect(acceptTx.outputs[1].token).to.equal(undefined);
- expect(acceptTx.outputs[1].value).to.equal(703687426048);
+ expect(acceptTx.outputs[1].sats).to.equal(703687426048n);
expect(acceptTx.outputs[1].outputScript).to.equal(makerScriptHex);
// 2nd output is tokens to taker
- expect(acceptTx.outputs[2].token?.amount).to.equal(
- agoraPartial.offeredTokens().toString(),
+ expect(acceptTx.outputs[2].token?.atoms).to.equal(
+ agoraPartial.offeredTokens(),
);
- expect(acceptTx.outputs[2].value).to.equal(DEFAULT_DUST_LIMIT);
+ expect(acceptTx.outputs[2].sats).to.equal(DEFAULT_DUST_LIMIT);
expect(acceptTx.outputs[2].outputScript).to.equal(takerScriptHex);
});
});
@@ -375,7 +370,7 @@
before(async () => {
runner = await TestRunner.setup('setup_scripts/ecash-agora_base');
chronik = runner.chronik;
- await runner.setupCoins(1, BIGSATS + 11000);
+ await runner.setupCoins(1, BIGSATS + 11000n);
});
after(() => {
@@ -384,7 +379,7 @@
it('AgoraPartial ALP 7450M XEC vs 2p47-1 small accept', async () => {
const [fuelInput, takerInput] = await makeBuilderInputs(runner, [
- 10000,
+ 10000n,
BIGSATS,
]);
@@ -446,30 +441,28 @@
emppScript([
agoraPartial.adPushdata(),
alpSend(agoraPartial.tokenId, agoraPartial.tokenType, [
- 0,
+ 0n,
agoraPartial.offeredTokens() - acceptedTokens,
acceptedTokens,
]),
]).bytecode,
),
);
- expect(acceptTx.outputs[0].value).to.equal(0);
+ expect(acceptTx.outputs[0].sats).to.equal(0n);
expect(acceptTx.outputs[0].token).to.equal(undefined);
// 1st output is sats to maker
expect(acceptTx.outputs[1].token).to.equal(undefined);
- expect(acceptTx.outputs[1].value).to.equal(549755813888);
+ expect(acceptTx.outputs[1].sats).to.equal(549755813888n);
expect(acceptTx.outputs[1].outputScript).to.equal(makerScriptHex);
// 2nd output is back to the P2SH Script
- expect(acceptTx.outputs[2].token?.amount).to.equal(
- (agoraPartial.offeredTokens() - acceptedTokens).toString(),
+ expect(acceptTx.outputs[2].token?.atoms).to.equal(
+ agoraPartial.offeredTokens() - acceptedTokens,
);
- expect(acceptTx.outputs[2].value).to.equal(DEFAULT_DUST_LIMIT);
+ expect(acceptTx.outputs[2].sats).to.equal(DEFAULT_DUST_LIMIT);
expect(acceptTx.outputs[2].outputScript.slice(0, 4)).to.equal('a914');
// 3rd output is tokens to taker
- expect(acceptTx.outputs[3].token?.amount).to.equal(
- acceptedTokens.toString(),
- );
- expect(acceptTx.outputs[3].value).to.equal(DEFAULT_DUST_LIMIT);
+ expect(acceptTx.outputs[3].token?.atoms).to.equal(acceptedTokens);
+ expect(acceptTx.outputs[3].sats).to.equal(DEFAULT_DUST_LIMIT);
expect(acceptTx.outputs[3].outputScript).to.equal(takerScriptHex);
});
});
@@ -481,7 +474,7 @@
before(async () => {
runner = await TestRunner.setup('setup_scripts/ecash-agora_base');
chronik = runner.chronik;
- await runner.setupCoins(1, BIGSATS + 11000);
+ await runner.setupCoins(1, BIGSATS + 11000n);
});
after(() => {
@@ -490,7 +483,7 @@
it('AgoraPartial ALP 7450M XEC vs 100 full accept', async () => {
const [fuelInput, takerInput] = await makeBuilderInputs(runner, [
- 10000,
+ 10000n,
BIGSATS,
]);
@@ -547,21 +540,21 @@
emppScript([
agoraPartial.adPushdata(),
alpSend(agoraPartial.tokenId, agoraPartial.tokenType, [
- 0,
+ 0n,
agoraPartial.offeredTokens(),
]),
]).bytecode,
),
);
- expect(acceptTx.outputs[0].value).to.equal(0);
+ expect(acceptTx.outputs[0].sats).to.equal(0n);
expect(acceptTx.outputs[0].token).to.equal(undefined);
// 1st output is sats to maker
expect(acceptTx.outputs[1].token).to.equal(undefined);
- expect(acceptTx.outputs[1].value).to.equal(712360591360);
+ expect(acceptTx.outputs[1].sats).to.equal(712360591360n);
expect(acceptTx.outputs[1].outputScript).to.equal(makerScriptHex);
// 2nd output is tokens to taker
- expect(acceptTx.outputs[2].token?.amount).to.equal('100');
- expect(acceptTx.outputs[2].value).to.equal(DEFAULT_DUST_LIMIT);
+ expect(acceptTx.outputs[2].token?.atoms).to.equal(100n);
+ expect(acceptTx.outputs[2].sats).to.equal(DEFAULT_DUST_LIMIT);
expect(acceptTx.outputs[2].outputScript).to.equal(takerScriptHex);
});
});
@@ -573,7 +566,7 @@
before(async () => {
runner = await TestRunner.setup('setup_scripts/ecash-agora_base');
chronik = runner.chronik;
- await runner.setupCoins(1, BIGSATS + 11000);
+ await runner.setupCoins(1, BIGSATS + 11000n);
});
after(() => {
@@ -582,7 +575,7 @@
it('AgoraPartial ALP 7450M XEC vs 100 small accept', async () => {
const [fuelInput, takerInput] = await makeBuilderInputs(runner, [
- 10000,
+ 10000n,
BIGSATS,
]);
@@ -634,27 +627,27 @@
toHex(
emppScript([
agoraPartial.adPushdata(),
- alpSend(
- agoraPartial.tokenId,
- agoraPartial.tokenType,
- [0, 99, 1],
- ),
+ alpSend(agoraPartial.tokenId, agoraPartial.tokenType, [
+ 0n,
+ 99n,
+ 1n,
+ ]),
]).bytecode,
),
);
- expect(acceptTx.outputs[0].value).to.equal(0);
+ expect(acceptTx.outputs[0].sats).to.equal(0n);
expect(acceptTx.outputs[0].token).to.equal(undefined);
// 1st output is sats to maker
expect(acceptTx.outputs[1].token).to.equal(undefined);
- expect(acceptTx.outputs[1].value).to.equal(712964571136);
+ expect(acceptTx.outputs[1].sats).to.equal(712964571136n);
expect(acceptTx.outputs[1].outputScript).to.equal(makerScriptHex);
// 2nd output is back to the P2SH Script
- expect(acceptTx.outputs[2].token?.amount).to.equal('99');
- expect(acceptTx.outputs[2].value).to.equal(DEFAULT_DUST_LIMIT);
+ expect(acceptTx.outputs[2].token?.atoms).to.equal(99n);
+ expect(acceptTx.outputs[2].sats).to.equal(DEFAULT_DUST_LIMIT);
expect(acceptTx.outputs[2].outputScript.slice(0, 4)).to.equal('a914');
// 3rd output is tokens to taker
- expect(acceptTx.outputs[3].token?.amount).to.equal('1');
- expect(acceptTx.outputs[3].value).to.equal(DEFAULT_DUST_LIMIT);
+ expect(acceptTx.outputs[3].token?.atoms).to.equal(1n);
+ expect(acceptTx.outputs[3].sats).to.equal(DEFAULT_DUST_LIMIT);
expect(acceptTx.outputs[3].outputScript).to.equal(takerScriptHex);
});
});
diff --git a/modules/ecash-agora/tests/partial.alp.test.ts b/modules/ecash-agora/tests/partial.alp.test.ts
--- a/modules/ecash-agora/tests/partial.alp.test.ts
+++ b/modules/ecash-agora/tests/partial.alp.test.ts
@@ -29,7 +29,7 @@
// This test needs a lot of sats
const NUM_COINS = 500;
-const COIN_VALUE = 1100000000;
+const COIN_VALUE = 1100000000n;
const BASE_PARAMS_ALP = {
tokenId: '00'.repeat(32), // filled in later
@@ -55,17 +55,17 @@
let chronik: ChronikClient;
async function makeBuilderInputs(
- values: number[],
+ values: bigint[],
): Promise<TxBuilderInput[]> {
const txid = await runner.sendToScript(values, makerScript);
- return values.map((value, outIdx) => ({
+ return values.map((sats, outIdx) => ({
input: {
prevOut: {
txid,
outIdx,
},
signData: {
- value,
+ sats,
outputScript: makerScript,
},
},
@@ -430,8 +430,8 @@
const askedSats = agoraPartial.askedSats(testCase.acceptedTokens);
const requiredSats = askedSats + 2000n;
const [fuelInput, takerInput] = await makeBuilderInputs([
- 4000,
- Number(requiredSats),
+ 4000n,
+ requiredSats,
]);
const offer = await makeAlpOffer({
@@ -461,24 +461,24 @@
alpSend(
agoraPartial.tokenId,
agoraPartial.tokenType,
- [0, agoraPartial.offeredTokens()],
+ [0n, agoraPartial.offeredTokens()],
),
]).bytecode,
),
);
- expect(acceptTx.outputs[0].value).to.equal(0);
+ expect(acceptTx.outputs[0].sats).to.equal(0n);
expect(acceptTx.outputs[0].token).to.equal(undefined);
// 1st output is sats to maker
expect(acceptTx.outputs[1].token).to.equal(undefined);
- expect(acceptTx.outputs[1].value).to.equal(testCase.askedSats);
+ expect(acceptTx.outputs[1].sats).to.equal(testCase.askedSats);
expect(acceptTx.outputs[1].outputScript).to.equal(
makerScriptHex,
);
// 2nd output is tokens to taker
- expect(acceptTx.outputs[2].token?.amount).to.equal(
- offeredTokens.toString(),
+ expect(acceptTx.outputs[2].token?.atoms).to.equal(
+ offeredTokens,
);
- expect(acceptTx.outputs[2].value).to.equal(DEFAULT_DUST_LIMIT);
+ expect(acceptTx.outputs[2].sats).to.equal(DEFAULT_DUST_LIMIT);
expect(acceptTx.outputs[2].outputScript).to.equal(
takerScriptHex,
);
@@ -500,32 +500,30 @@
emppScript([
agoraPartial.adPushdata(),
alpSend(agoraPartial.tokenId, agoraPartial.tokenType, [
- 0,
+ 0n,
leftoverTokens,
testCase.acceptedTokens,
]),
]).bytecode,
),
);
- expect(acceptTx.outputs[0].value).to.equal(0);
+ expect(acceptTx.outputs[0].sats).to.equal(0n);
expect(acceptTx.outputs[0].token).to.equal(undefined);
// 1st output is sats to maker
expect(acceptTx.outputs[1].token).to.equal(undefined);
- expect(acceptTx.outputs[1].value).to.equal(testCase.askedSats);
+ expect(acceptTx.outputs[1].sats).to.equal(testCase.askedSats);
expect(acceptTx.outputs[1].outputScript).to.equal(makerScriptHex);
// 2nd output is back to the P2SH Script
- expect(acceptTx.outputs[2].token?.amount).to.equal(
- leftoverTokens.toString(),
- );
- expect(acceptTx.outputs[2].value).to.equal(DEFAULT_DUST_LIMIT);
+ expect(acceptTx.outputs[2].token?.atoms).to.equal(leftoverTokens);
+ expect(acceptTx.outputs[2].sats).to.equal(DEFAULT_DUST_LIMIT);
expect(acceptTx.outputs[2].outputScript.slice(0, 4)).to.equal(
'a914',
);
// 3rd output is tokens to taker
- expect(acceptTx.outputs[3].token?.amount).to.equal(
- testCase.acceptedTokens.toString(),
+ expect(acceptTx.outputs[3].token?.atoms).to.equal(
+ testCase.acceptedTokens,
);
- expect(acceptTx.outputs[3].value).to.equal(DEFAULT_DUST_LIMIT);
+ expect(acceptTx.outputs[3].sats).to.equal(DEFAULT_DUST_LIMIT);
expect(acceptTx.outputs[3].outputScript).to.equal(takerScriptHex);
// Offer is now modified
const newOffers = await agora.activeOffersByTokenId(
@@ -549,17 +547,13 @@
const cancelTxSer = newOffer
.cancelTx({
cancelSk: makerSk,
- fuelInputs: await makeBuilderInputs([
- Number(cancelFeeSats),
- ]),
+ fuelInputs: await makeBuilderInputs([cancelFeeSats]),
recipientScript: makerScript,
})
.ser();
const cancelTxid = (await chronik.broadcastTx(cancelTxSer)).txid;
const cancelTx = await chronik.tx(cancelTxid);
- expect(cancelTx.outputs[1].token?.amount).to.equal(
- leftoverTokens.toString(),
- );
+ expect(cancelTx.outputs[1].token?.atoms).to.equal(leftoverTokens);
expect(cancelTx.outputs[1].outputScript).to.equal(makerScriptHex);
// takerIndex is 2 for full accept, 3 for partial accept
@@ -567,9 +561,9 @@
// Get takenInfo from offer creation params
const takenInfo: TakenInfo = {
- satoshisPaid: testCase.askedSats,
+ satoshisPaid: BigInt(testCase.askedSats),
takerScriptHex: acceptTx.outputs[takerIndex].outputScript,
- baseTokens: testCase.acceptedTokens.toString(),
+ atoms: testCase.acceptedTokens,
};
// Tx history by token ID
@@ -619,8 +613,8 @@
const askedSats = agoraPartial.askedSats(thisTestCase.acceptedTokens);
const requiredSats = askedSats + 2000n;
const [fuelInput, takerInput] = await makeBuilderInputs([
- 4000,
- Number(requiredSats),
+ 4000n,
+ requiredSats,
]);
const offer = await makeAlpOffer({
@@ -686,8 +680,8 @@
const askedSats = agoraPartial.askedSats(acceptedTokens);
const requiredSats = askedSats + 2000n;
const [fuelInput, takerInput] = await makeBuilderInputs([
- 4000,
- Number(requiredSats),
+ 4000n,
+ requiredSats,
]);
const offer = await makeAlpOffer({
diff --git a/modules/ecash-agora/tests/partial.locktime.test.ts b/modules/ecash-agora/tests/partial.locktime.test.ts
--- a/modules/ecash-agora/tests/partial.locktime.test.ts
+++ b/modules/ecash-agora/tests/partial.locktime.test.ts
@@ -29,7 +29,7 @@
// This test needs a lot of sats
const NUM_COINS = 500;
-const COIN_VALUE = 1000000;
+const COIN_VALUE = 1000000n;
const BASE_PARAMS_ALP = {
tokenId: '00'.repeat(32), // filled in later
@@ -53,17 +53,17 @@
let chronik: ChronikClient;
async function makeBuilderInputs(
- values: number[],
+ values: bigint[],
): Promise<TxBuilderInput[]> {
const txid = await runner.sendToScript(values, makerScript);
- return values.map((value, outIdx) => ({
+ return values.map((sats, outIdx) => ({
input: {
prevOut: {
txid,
outIdx,
},
signData: {
- value,
+ sats,
outputScript: makerScript,
},
},
@@ -95,11 +95,11 @@
const askedSats = agoraPartial.askedSats(100n);
const requiredSats = askedSats + 2000n;
const [fuelInput, takerInput] = await makeBuilderInputs([
- 8000,
- Number(requiredSats),
+ 8000n,
+ requiredSats,
]);
- const genesisOutputSats = 2000;
+ const genesisOutputSats = 2000n;
const genesisTx = makeAlpGenesis({
tokenType: agoraPartial.tokenType,
fuelInput,
@@ -109,9 +109,9 @@
agoraPartial.offeredTokens(),
],
extraOutputs: [
- { value: genesisOutputSats, script: makerScript },
- { value: genesisOutputSats, script: makerScript },
- { value: genesisOutputSats, script: makerScript },
+ { sats: genesisOutputSats, script: makerScript },
+ { sats: genesisOutputSats, script: makerScript },
+ { sats: genesisOutputSats, script: makerScript },
],
});
const genesisTxid = (await chronik.broadcastTx(genesisTx.ser())).txid;
@@ -134,7 +134,7 @@
outIdx: offerIdx + 1,
},
signData: {
- value: genesisOutputSats,
+ sats: genesisOutputSats,
outputScript: makerScript,
},
},
@@ -143,7 +143,7 @@
],
outputs: [
{
- value: 0,
+ sats: 0n,
script: emppScript([
offerPartial.adPushdata(),
alpSend(tokenId, offerPartial.tokenType, [
@@ -151,7 +151,7 @@
]),
]),
},
- { value: 546, script: agoraP2sh },
+ { sats: 546n, script: agoraP2sh },
],
});
const offerTx = txBuildOffer.sign();
@@ -223,7 +223,7 @@
const acceptTx = await chronik.tx(acceptTxid);
expect(acceptTx.tokenEntries[0].burnSummary).to.equal(
- 'Unexpected burn: Burns 1000 base tokens',
+ 'Unexpected burn: Burns 1000 atoms',
);
});
});
diff --git a/modules/ecash-agora/tests/partial.slp.bigsats.test.ts b/modules/ecash-agora/tests/partial.slp.bigsats.test.ts
--- a/modules/ecash-agora/tests/partial.slp.bigsats.test.ts
+++ b/modules/ecash-agora/tests/partial.slp.bigsats.test.ts
@@ -33,7 +33,7 @@
dustAmount: DEFAULT_DUST_LIMIT,
};
-const BIGSATS = 149 * 5000000000 - 20000;
+const BIGSATS = BigInt(149 * 5000000000 - 20000);
const ecc = new Ecc();
const makerSk = fromHex('33'.repeat(32));
@@ -49,17 +49,17 @@
async function makeBuilderInputs(
runner: TestRunner,
- values: number[],
+ values: bigint[],
): Promise<TxBuilderInput[]> {
const txid = await runner.sendToScript(values, makerScript);
- return values.map((value, outIdx) => ({
+ return values.map((sats, outIdx) => ({
input: {
prevOut: {
txid,
outIdx,
},
signData: {
- value,
+ sats,
outputScript: makerScript,
},
},
@@ -74,7 +74,7 @@
before(async () => {
runner = await TestRunner.setup('setup_scripts/ecash-agora_base');
chronik = runner.chronik;
- await runner.setupCoins(1, BIGSATS + 11000);
+ await runner.setupCoins(1, BIGSATS + 11000n);
});
after(() => {
@@ -83,7 +83,7 @@
it('Agora Partial 7450M XEC vs 2p64-1 full accept', async () => {
const [fuelInput, takerInput] = await makeBuilderInputs(runner, [
- 10000,
+ 10000n,
BIGSATS,
]);
@@ -142,22 +142,20 @@
expect(acceptTx.outputs[0].outputScript).to.equal(
toHex(
slpSend(agoraPartial.tokenId, agoraPartial.tokenType, [
- 0,
+ 0n,
agoraPartial.offeredTokens(),
]).bytecode,
),
);
- expect(acceptTx.outputs[0].value).to.equal(0);
+ expect(acceptTx.outputs[0].sats).to.equal(0n);
expect(acceptTx.outputs[0].token).to.equal(undefined);
// 1st output is sats to maker
expect(acceptTx.outputs[1].token).to.equal(undefined);
- expect(acceptTx.outputs[1].value).to.equal(738825273344);
+ expect(acceptTx.outputs[1].sats).to.equal(738825273344n);
expect(acceptTx.outputs[1].outputScript).to.equal(makerScriptHex);
// 2nd output is tokens to taker
- expect(acceptTx.outputs[2].token?.amount).to.equal(
- 0xffffff0000000000n.toString(),
- );
- expect(acceptTx.outputs[2].value).to.equal(DEFAULT_DUST_LIMIT);
+ expect(acceptTx.outputs[2].token?.atoms).to.equal(0xffffff0000000000n);
+ expect(acceptTx.outputs[2].sats).to.equal(DEFAULT_DUST_LIMIT);
expect(acceptTx.outputs[2].outputScript).to.equal(takerScriptHex);
});
});
@@ -169,7 +167,7 @@
before(async () => {
runner = await TestRunner.setup('setup_scripts/ecash-agora_base');
chronik = runner.chronik;
- await runner.setupCoins(1, BIGSATS + 11000);
+ await runner.setupCoins(1, BIGSATS + 11000n);
});
after(() => {
@@ -178,7 +176,7 @@
it('Agora Partial 7450M XEC vs 2p64-1 small accept', async () => {
const [fuelInput, takerInput] = await makeBuilderInputs(runner, [
- 10000,
+ 10000n,
BIGSATS,
]);
@@ -238,29 +236,27 @@
expect(acceptTx.outputs[0].outputScript).to.equal(
toHex(
slpSend(agoraPartial.tokenId, agoraPartial.tokenType, [
- 0,
+ 0n,
agoraPartial.offeredTokens() - acceptedTokens,
acceptedTokens,
]).bytecode,
),
);
- expect(acceptTx.outputs[0].value).to.equal(0);
+ expect(acceptTx.outputs[0].sats).to.equal(0n);
expect(acceptTx.outputs[0].token).to.equal(undefined);
// 1st output is sats to maker
expect(acceptTx.outputs[1].token).to.equal(undefined);
- expect(acceptTx.outputs[1].value).to.equal(549755813888);
+ expect(acceptTx.outputs[1].sats).to.equal(549755813888n);
expect(acceptTx.outputs[1].outputScript).to.equal(makerScriptHex);
// 2nd output is back to the P2SH Script
- expect(acceptTx.outputs[2].token?.amount).to.equal(
- (agoraPartial.offeredTokens() - acceptedTokens).toString(),
+ expect(acceptTx.outputs[2].token?.atoms).to.equal(
+ agoraPartial.offeredTokens() - acceptedTokens,
);
- expect(acceptTx.outputs[2].value).to.equal(DEFAULT_DUST_LIMIT);
+ expect(acceptTx.outputs[2].sats).to.equal(DEFAULT_DUST_LIMIT);
expect(acceptTx.outputs[2].outputScript.slice(0, 4)).to.equal('a914');
// 3rd output is tokens to taker
- expect(acceptTx.outputs[3].token?.amount).to.equal(
- acceptedTokens.toString(),
- );
- expect(acceptTx.outputs[3].value).to.equal(DEFAULT_DUST_LIMIT);
+ expect(acceptTx.outputs[3].token?.atoms).to.equal(acceptedTokens);
+ expect(acceptTx.outputs[3].sats).to.equal(DEFAULT_DUST_LIMIT);
expect(acceptTx.outputs[3].outputScript).to.equal(takerScriptHex);
});
});
@@ -272,7 +268,7 @@
before(async () => {
runner = await TestRunner.setup('setup_scripts/ecash-agora_base');
chronik = runner.chronik;
- await runner.setupCoins(1, BIGSATS + 11000);
+ await runner.setupCoins(1, BIGSATS + 11000n);
});
after(() => {
@@ -281,7 +277,7 @@
it('Agora Partial 7450M XEC vs 2p63-1 full accept', async () => {
const [fuelInput, takerInput] = await makeBuilderInputs(runner, [
- 10000,
+ 10000n,
BIGSATS,
]);
@@ -342,22 +338,20 @@
expect(acceptTx.outputs[0].outputScript).to.equal(
toHex(
slpSend(agoraPartial.tokenId, agoraPartial.tokenType, [
- 0,
+ 0n,
acceptedTokens,
]).bytecode,
),
);
- expect(acceptTx.outputs[0].value).to.equal(0);
+ expect(acceptTx.outputs[0].sats).to.equal(0n);
expect(acceptTx.outputs[0].token).to.equal(undefined);
// 1st output is sats to maker
expect(acceptTx.outputs[1].token).to.equal(undefined);
- expect(acceptTx.outputs[1].value).to.equal(740723589120);
+ expect(acceptTx.outputs[1].sats).to.equal(740723589120n);
expect(acceptTx.outputs[1].outputScript).to.equal(makerScriptHex);
// 2nd output is tokens to taker
- expect(acceptTx.outputs[2].token?.amount).to.equal(
- acceptedTokens.toString(),
- );
- expect(acceptTx.outputs[2].value).to.equal(DEFAULT_DUST_LIMIT);
+ expect(acceptTx.outputs[2].token?.atoms).to.equal(acceptedTokens);
+ expect(acceptTx.outputs[2].sats).to.equal(DEFAULT_DUST_LIMIT);
expect(acceptTx.outputs[2].outputScript).to.equal(takerScriptHex);
});
});
@@ -369,7 +363,7 @@
before(async () => {
runner = await TestRunner.setup('setup_scripts/ecash-agora_base');
chronik = runner.chronik;
- await runner.setupCoins(1, BIGSATS + 11000);
+ await runner.setupCoins(1, BIGSATS + 11000n);
});
after(() => {
@@ -378,7 +372,7 @@
it('Agora Partial 7450M XEC vs 2p63-1 small accept', async () => {
const [fuelInput, takerInput] = await makeBuilderInputs(runner, [
- 10000,
+ 10000n,
BIGSATS,
]);
@@ -438,29 +432,27 @@
expect(acceptTx.outputs[0].outputScript).to.equal(
toHex(
slpSend(agoraPartial.tokenId, agoraPartial.tokenType, [
- 0,
+ 0n,
agoraPartial.offeredTokens() - acceptedTokens,
acceptedTokens,
]).bytecode,
),
);
- expect(acceptTx.outputs[0].value).to.equal(0);
+ expect(acceptTx.outputs[0].sats).to.equal(0n);
expect(acceptTx.outputs[0].token).to.equal(undefined);
// 1st output is sats to maker
expect(acceptTx.outputs[1].token).to.equal(undefined);
- expect(acceptTx.outputs[1].value).to.equal(4294967296);
+ expect(acceptTx.outputs[1].sats).to.equal(4294967296n);
expect(acceptTx.outputs[1].outputScript).to.equal(makerScriptHex);
// 2nd output is back to the P2SH Script
- expect(acceptTx.outputs[2].token?.amount).to.equal(
- (agoraPartial.offeredTokens() - acceptedTokens).toString(),
+ expect(acceptTx.outputs[2].token?.atoms).to.equal(
+ agoraPartial.offeredTokens() - acceptedTokens,
);
- expect(acceptTx.outputs[2].value).to.equal(DEFAULT_DUST_LIMIT);
+ expect(acceptTx.outputs[2].sats).to.equal(DEFAULT_DUST_LIMIT);
expect(acceptTx.outputs[2].outputScript.slice(0, 4)).to.equal('a914');
// 3rd output is tokens to taker
- expect(acceptTx.outputs[3].token?.amount).to.equal(
- acceptedTokens.toString(),
- );
- expect(acceptTx.outputs[3].value).to.equal(DEFAULT_DUST_LIMIT);
+ expect(acceptTx.outputs[3].token?.atoms).to.equal(acceptedTokens);
+ expect(acceptTx.outputs[3].sats).to.equal(DEFAULT_DUST_LIMIT);
expect(acceptTx.outputs[3].outputScript).to.equal(takerScriptHex);
});
});
@@ -472,7 +464,7 @@
before(async () => {
runner = await TestRunner.setup('setup_scripts/ecash-agora_base');
chronik = runner.chronik;
- await runner.setupCoins(1, BIGSATS + 11000);
+ await runner.setupCoins(1, BIGSATS + 11000n);
});
after(() => {
@@ -481,7 +473,7 @@
it('Agora Partial 7450M XEC vs 100 full accept', async () => {
const [fuelInput, takerInput] = await makeBuilderInputs(runner, [
- 10000,
+ 10000n,
BIGSATS,
]);
@@ -535,19 +527,21 @@
// 0th output is OP_RETURN SLP SEND
expect(acceptTx.outputs[0].outputScript).to.equal(
toHex(
- slpSend(agoraPartial.tokenId, agoraPartial.tokenType, [0, 100n])
- .bytecode,
+ slpSend(agoraPartial.tokenId, agoraPartial.tokenType, [
+ 0n,
+ 100n,
+ ]).bytecode,
),
);
- expect(acceptTx.outputs[0].value).to.equal(0);
+ expect(acceptTx.outputs[0].sats).to.equal(0n);
expect(acceptTx.outputs[0].token).to.equal(undefined);
// 1st output is sats to maker
expect(acceptTx.outputs[1].token).to.equal(undefined);
- expect(acceptTx.outputs[1].value).to.equal(712360591360);
+ expect(acceptTx.outputs[1].sats).to.equal(712360591360n);
expect(acceptTx.outputs[1].outputScript).to.equal(makerScriptHex);
// 2nd output is tokens to taker
- expect(acceptTx.outputs[2].token?.amount).to.equal('100');
- expect(acceptTx.outputs[2].value).to.equal(DEFAULT_DUST_LIMIT);
+ expect(acceptTx.outputs[2].token?.atoms).to.equal(100n);
+ expect(acceptTx.outputs[2].sats).to.equal(DEFAULT_DUST_LIMIT);
expect(acceptTx.outputs[2].outputScript).to.equal(takerScriptHex);
});
});
@@ -559,7 +553,7 @@
before(async () => {
runner = await TestRunner.setup('setup_scripts/ecash-agora_base');
chronik = runner.chronik;
- await runner.setupCoins(1, BIGSATS + 11000);
+ await runner.setupCoins(1, BIGSATS + 11000n);
});
after(() => {
@@ -568,7 +562,7 @@
it('Agora Partial 7450M XEC vs 100 small accept', async () => {
const [fuelInput, takerInput] = await makeBuilderInputs(runner, [
- 10000,
+ 10000n,
BIGSATS,
]);
@@ -619,25 +613,25 @@
expect(acceptTx.outputs[0].outputScript).to.equal(
toHex(
slpSend(agoraPartial.tokenId, agoraPartial.tokenType, [
- 0,
+ 0n,
99n,
1n,
]).bytecode,
),
);
- expect(acceptTx.outputs[0].value).to.equal(0);
+ expect(acceptTx.outputs[0].sats).to.equal(0n);
expect(acceptTx.outputs[0].token).to.equal(undefined);
// 1st output is sats to maker
expect(acceptTx.outputs[1].token).to.equal(undefined);
- expect(acceptTx.outputs[1].value).to.equal(712964571136);
+ expect(acceptTx.outputs[1].sats).to.equal(712964571136n);
expect(acceptTx.outputs[1].outputScript).to.equal(makerScriptHex);
// 2nd output is back to the P2SH Script
- expect(acceptTx.outputs[2].token?.amount).to.equal('99');
- expect(acceptTx.outputs[2].value).to.equal(DEFAULT_DUST_LIMIT);
+ expect(acceptTx.outputs[2].token?.atoms).to.equal(99n);
+ expect(acceptTx.outputs[2].sats).to.equal(DEFAULT_DUST_LIMIT);
expect(acceptTx.outputs[2].outputScript.slice(0, 4)).to.equal('a914');
// 3rd output is tokens to taker
- expect(acceptTx.outputs[3].token?.amount).to.equal('1');
- expect(acceptTx.outputs[3].value).to.equal(DEFAULT_DUST_LIMIT);
+ expect(acceptTx.outputs[3].token?.atoms).to.equal(1n);
+ expect(acceptTx.outputs[3].sats).to.equal(DEFAULT_DUST_LIMIT);
expect(acceptTx.outputs[3].outputScript).to.equal(takerScriptHex);
});
});
diff --git a/modules/ecash-agora/tests/partial.slp.test.ts b/modules/ecash-agora/tests/partial.slp.test.ts
--- a/modules/ecash-agora/tests/partial.slp.test.ts
+++ b/modules/ecash-agora/tests/partial.slp.test.ts
@@ -28,7 +28,7 @@
// This test needs a lot of sats
const NUM_COINS = 500;
-const COIN_VALUE = 1100000000;
+const COIN_VALUE = 1100000000n;
const BASE_PARAMS_SLP = {
tokenId: '00'.repeat(32), // filled in later
@@ -54,17 +54,17 @@
let chronik: ChronikClient;
async function makeBuilderInputs(
- values: number[],
+ values: bigint[],
): Promise<TxBuilderInput[]> {
const txid = await runner.sendToScript(values, makerScript);
- return values.map((value, outIdx) => ({
+ return values.map((sats, outIdx) => ({
input: {
prevOut: {
txid,
outIdx,
},
signData: {
- value,
+ sats,
outputScript: makerScript,
},
},
@@ -485,8 +485,8 @@
const askedSats = agoraPartial.askedSats(testCase.acceptedTokens);
const requiredSats = askedSats + 2000n;
const [fuelInput, takerInput] = await makeBuilderInputs([
- 4000,
- Number(requiredSats),
+ 4000n,
+ requiredSats,
]);
const offer = await makeSlpOffer({
@@ -512,22 +512,22 @@
expect(acceptTx.outputs[0].outputScript).to.equal(
toHex(
slpSend(agoraPartial.tokenId, agoraPartial.tokenType, [
- 0,
+ 0n,
agoraPartial.offeredTokens(),
]).bytecode,
),
);
// 1st output is sats to maker
expect(acceptTx.outputs[1].token).to.equal(undefined);
- expect(acceptTx.outputs[1].value).to.equal(testCase.askedSats);
+ expect(acceptTx.outputs[1].sats).to.equal(testCase.askedSats);
expect(acceptTx.outputs[1].outputScript).to.equal(
makerScriptHex,
);
// 2nd output is tokens to taker
- expect(acceptTx.outputs[2].token?.amount).to.equal(
- offeredTokens.toString(),
+ expect(acceptTx.outputs[2].token?.atoms).to.equal(
+ offeredTokens,
);
- expect(acceptTx.outputs[2].value).to.equal(DEFAULT_DUST_LIMIT);
+ expect(acceptTx.outputs[2].sats).to.equal(DEFAULT_DUST_LIMIT);
expect(acceptTx.outputs[2].outputScript).to.equal(
takerScriptHex,
);
@@ -547,7 +547,7 @@
expect(acceptTx.outputs[0].outputScript).to.equal(
toHex(
slpSend(agoraPartial.tokenId, agoraPartial.tokenType, [
- 0,
+ 0n,
leftoverTokens,
testCase.acceptedTokens,
]).bytecode,
@@ -555,21 +555,19 @@
);
// 1st output is sats to maker
expect(acceptTx.outputs[1].token).to.equal(undefined);
- expect(acceptTx.outputs[1].value).to.equal(testCase.askedSats);
+ expect(acceptTx.outputs[1].sats).to.equal(testCase.askedSats);
expect(acceptTx.outputs[1].outputScript).to.equal(makerScriptHex);
// 2nd output is back to the P2SH Script
- expect(acceptTx.outputs[2].token?.amount).to.equal(
- leftoverTokens.toString(),
- );
- expect(acceptTx.outputs[2].value).to.equal(DEFAULT_DUST_LIMIT);
+ expect(acceptTx.outputs[2].token?.atoms).to.equal(leftoverTokens);
+ expect(acceptTx.outputs[2].sats).to.equal(DEFAULT_DUST_LIMIT);
expect(acceptTx.outputs[2].outputScript.slice(0, 4)).to.equal(
'a914',
);
// 3rd output is tokens to taker
- expect(acceptTx.outputs[3].token?.amount).to.equal(
- testCase.acceptedTokens.toString(),
+ expect(acceptTx.outputs[3].token?.atoms).to.equal(
+ testCase.acceptedTokens,
);
- expect(acceptTx.outputs[3].value).to.equal(DEFAULT_DUST_LIMIT);
+ expect(acceptTx.outputs[3].sats).to.equal(DEFAULT_DUST_LIMIT);
expect(acceptTx.outputs[3].outputScript).to.equal(takerScriptHex);
// Offer is now modified
const newOffers = await agora.activeOffersByTokenId(
@@ -593,17 +591,13 @@
const cancelTxSer = newOffer
.cancelTx({
cancelSk: makerSk,
- fuelInputs: await makeBuilderInputs([
- Number(cancelFeeSats),
- ]),
+ fuelInputs: await makeBuilderInputs([cancelFeeSats]),
recipientScript: makerScript,
})
.ser();
const cancelTxid = (await chronik.broadcastTx(cancelTxSer)).txid;
const cancelTx = await chronik.tx(cancelTxid);
- expect(cancelTx.outputs[1].token?.amount).to.equal(
- leftoverTokens.toString(),
- );
+ expect(cancelTx.outputs[1].token?.atoms).to.equal(leftoverTokens);
expect(cancelTx.outputs[1].outputScript).to.equal(makerScriptHex);
// takerIndex is 2 for full accept, 3 for partial accept
@@ -611,9 +605,9 @@
// Get takenInfo from offer creation params
const takenInfo: TakenInfo = {
- satoshisPaid: testCase.askedSats,
+ satoshisPaid: BigInt(testCase.askedSats),
takerScriptHex: acceptTx.outputs[takerIndex].outputScript,
- baseTokens: testCase.acceptedTokens.toString(),
+ atoms: testCase.acceptedTokens,
};
// Tx history by token ID
@@ -664,8 +658,8 @@
const askedSats = agoraPartial.askedSats(thisTestCase.acceptedTokens);
const requiredSats = askedSats + 2000n;
const [fuelInput, takerInput] = await makeBuilderInputs([
- 4000,
- Number(requiredSats),
+ 4000n,
+ requiredSats,
]);
const offer = await makeSlpOffer({
@@ -722,8 +716,8 @@
const askedSats = agoraPartial.askedSats(acceptedTokens);
const requiredSats = askedSats + 2000n;
const [fuelInput, takerInput] = await makeBuilderInputs([
- 4000,
- Number(requiredSats),
+ 4000n,
+ requiredSats,
]);
const offer = await makeSlpOffer({
diff --git a/modules/ecash-lib/README.md b/modules/ecash-lib/README.md
--- a/modules/ecash-lib/README.md
+++ b/modules/ecash-lib/README.md
@@ -90,3 +90,4 @@
- 1.5.0 - Support custom WASM URL and module [D17622](https://reviews.bitcoinabc.org/D17622)
- 1.5.1 - `Address.withPrefix()` returns same prefix if unchanged (instead of throwing an error) [D17623](https://reviews.bitcoinabc.org/D17623)
- 2.0.0 - Remove `initWasm`, auto-load the WebAssembly instead. Remove unneeded `ecc` parameters, esp. in `TxBuilder.sign` and `HdNode.fromSeed` [D17639](https://reviews.bitcoinabc.org/D17639) [D17640](https://reviews.bitcoinabc.org/D17640)
+- 3.0.0 - Improve types and shapes in line with chronik proto updates [D17650](https://reviews.bitcoinabc.org/D17650)
diff --git a/modules/ecash-lib/package-lock.json b/modules/ecash-lib/package-lock.json
--- a/modules/ecash-lib/package-lock.json
+++ b/modules/ecash-lib/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "ecash-lib",
- "version": "2.0.0",
+ "version": "3.0.0",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "ecash-lib",
- "version": "2.0.0",
+ "version": "3.0.0",
"license": "MIT",
"dependencies": {
"b58-ts": "file:../b58-ts",
diff --git a/modules/ecash-lib/package.json b/modules/ecash-lib/package.json
--- a/modules/ecash-lib/package.json
+++ b/modules/ecash-lib/package.json
@@ -1,6 +1,6 @@
{
"name": "ecash-lib",
- "version": "2.0.0",
+ "version": "3.0.0",
"description": "Library for eCash transaction building",
"main": "./dist/indexNodeJs.js",
"browser": "./dist/indexBrowser.js",
diff --git a/modules/ecash-lib/src/consts.ts b/modules/ecash-lib/src/consts.ts
--- a/modules/ecash-lib/src/consts.ts
+++ b/modules/ecash-lib/src/consts.ts
@@ -3,6 +3,6 @@
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
/** Default dust limit on the eCash network. */
-export const DEFAULT_DUST_LIMIT = 546;
+export const DEFAULT_DUST_LIMIT = 546n;
/** Default fee per kB on the eCash network. */
export const DEFAULT_FEE_PER_KB = 1000;
diff --git a/modules/ecash-lib/src/test/testRunner.ts b/modules/ecash-lib/src/test/testRunner.ts
--- a/modules/ecash-lib/src/test/testRunner.ts
+++ b/modules/ecash-lib/src/test/testRunner.ts
@@ -5,7 +5,6 @@
import type { ChronikClient } from 'chronik-client';
import type { ChildProcess } from 'node:child_process';
-import { Ecc } from '../ecc.js';
import { shaRmd160 } from '../hash.js';
import { fromHex, toHex } from '../io/hex.js';
import { pushBytesOp } from '../op.js';
@@ -26,7 +25,7 @@
public runner: ChildProcess;
public chronik: ChronikClient;
private coinsTxid: string | undefined;
- private coinValue: number | undefined;
+ private coinValue: bigint | undefined;
private lastUsedOutIdx: number;
private constructor(runner: ChildProcess, chronik: ChronikClient) {
@@ -120,7 +119,7 @@
public async setupCoins(
numCoins: number,
- coinValue: number,
+ coinValue: bigint,
): Promise<void> {
const opTrueScriptHash = shaRmd160(OP_TRUE_SCRIPT.bytecode);
const utxos = (
@@ -135,19 +134,19 @@
sequence: 0xffffffff,
})),
});
- const utxosValue = utxos.reduce((a, b) => a + b.value, 0);
+ const utxosValue = utxos.reduce((a, b) => a + b.sats, 0n);
for (let i = 0; i < numCoins; ++i) {
tx.outputs.push({
- value: coinValue,
+ sats: coinValue,
script: anyoneP2sh,
});
}
tx.outputs.push({
- value: 0,
+ sats: 0n,
script: Script.fromOps([OP_RETURN]),
});
- tx.outputs[tx.outputs.length - 1].value =
- utxosValue - numCoins * coinValue - tx.serSize();
+ tx.outputs[tx.outputs.length - 1].sats =
+ utxosValue - BigInt(numCoins) * coinValue - BigInt(tx.serSize());
this.coinsTxid = (await this.chronik.broadcastTx(tx.ser())).txid;
this.coinValue = coinValue;
@@ -164,11 +163,11 @@
}
public async sendToScript(
- value: number | number[],
+ sats: bigint | bigint[],
script: Script,
): Promise<string> {
const coinValue = this.coinValue!;
- const values = Array.isArray(value) ? value : [value];
+ const satsArr = Array.isArray(sats) ? sats : [sats];
const setupTxBuilder = new TxBuilder({
inputs: [
{
@@ -177,17 +176,20 @@
script: ANYONE_SCRIPT_SIG,
sequence: 0xffffffff,
signData: {
- value: coinValue,
+ sats: coinValue,
},
},
},
],
outputs: [
- ...values.map(value => ({ value, script })),
+ ...satsArr.map(sats => ({ sats, script })),
Script.fromOps([OP_RETURN]), // burn leftover
],
});
- const setupTx = setupTxBuilder.sign({ feePerKb: 1000, dustLimit: 546 });
+ const setupTx = setupTxBuilder.sign({
+ feePerKb: 1000,
+ dustLimit: 546n,
+ });
return (await this.chronik.broadcastTx(setupTx.ser())).txid;
}
diff --git a/modules/ecash-lib/src/token/alp.ts b/modules/ecash-lib/src/token/alp.ts
--- a/modules/ecash-lib/src/token/alp.ts
+++ b/modules/ecash-lib/src/token/alp.ts
@@ -7,7 +7,7 @@
import { Writer } from '../io/writer.js';
import { WriterBytes } from '../io/writerbytes.js';
import { WriterLength } from '../io/writerlength.js';
-import { Amount, BURN, GENESIS, GenesisInfo, MINT, SEND } from './common.js';
+import { Atoms, BURN, GENESIS, GenesisInfo, MINT, SEND } from './common.js';
/** LOKAD ID for ALP */
export const ALP_LOKAD_ID = strToBytes('SLP2');
@@ -15,12 +15,12 @@
/** ALP standard token type number */
export const ALP_STANDARD = 0;
-/** Mint data specifying mint amounts and batons of a GENESIS/MINT tx */
+/** Mint data specifying mint amounts (in atoms) and batons of a GENESIS/MINT tx */
export interface MintData {
/**
* List of amounts to be minted by this tx, each having their own tx output.
*/
- amounts: Amount[];
+ amounts: Atoms[];
/** Number of mint batons to create, each having their own tx output. */
numBatons: number;
}
@@ -80,7 +80,7 @@
export function alpSend(
tokenId: string,
tokenType: number,
- sendAmounts: Amount[],
+ sendAmounts: Atoms[],
): Uint8Array {
const tokenIdBytes = fromHexRev(tokenId);
const writeSection = (writer: Writer) => {
@@ -105,7 +105,7 @@
export function alpBurn(
tokenId: string,
tokenType: number,
- burnAmount: Amount,
+ burnAmount: Atoms,
): Uint8Array {
const tokenIdBytes = fromHexRev(tokenId);
const writeSection = (writer: Writer) => {
@@ -131,7 +131,7 @@
writer.putU8(mintData.numBatons);
}
-function putAlpAmount(amount: Amount, writer: Writer) {
+function putAlpAmount(amount: Atoms, writer: Writer) {
const amountN = BigInt(amount);
writer.putU32(amountN & 0xffffffffn);
writer.putU16(amountN >> 32n);
diff --git a/modules/ecash-lib/src/token/common.ts b/modules/ecash-lib/src/token/common.ts
--- a/modules/ecash-lib/src/token/common.ts
+++ b/modules/ecash-lib/src/token/common.ts
@@ -4,7 +4,7 @@
import { strToBytes } from '../io/str.js';
-export type Amount = bigint | number;
+export type Atoms = bigint;
export const GENESIS = strToBytes('GENESIS');
export const MINT = strToBytes('MINT');
diff --git a/modules/ecash-lib/src/token/slp.test.ts b/modules/ecash-lib/src/token/slp.test.ts
--- a/modules/ecash-lib/src/token/slp.test.ts
+++ b/modules/ecash-lib/src/token/slp.test.ts
@@ -8,12 +8,12 @@
describe('SLP', () => {
it('SLP invalid usage', () => {
- expect(() => slpGenesis(99, {}, 0)).to.throw('Unknown token type 99');
- expect(() => slpMint('', 77, 0)).to.throw('Unknown token type 77');
- expect(() => slpMint('', 1, 0)).to.throw(
+ expect(() => slpGenesis(99, {}, 0n)).to.throw('Unknown token type 99');
+ expect(() => slpMint('', 77, 0n)).to.throw('Unknown token type 77');
+ expect(() => slpMint('', 1, 0n)).to.throw(
'Token ID must be 64 hex characters in length, but got 0',
);
- expect(() => slpMint('1'.repeat(64), 1, -1)).to.throw(
+ expect(() => slpMint('1'.repeat(64), 1, -1n)).to.throw(
'Amount out of range: -1',
);
expect(() => slpMint('1'.repeat(64), 1, 0x10000000000000000n)).to.throw(
@@ -28,7 +28,7 @@
expect(() => slpMintVault('1'.repeat(64), new Array(20))).to.throw(
'Cannot use more than 19 amounts, but got 20',
);
- expect(() => slpMintVault('1'.repeat(64), [-1])).to.throw(
+ expect(() => slpMintVault('1'.repeat(64), [-1n])).to.throw(
'Amount out of range: -1',
);
expect(() =>
@@ -44,17 +44,17 @@
expect(() => slpSend('1'.repeat(64), 1, new Array(20))).to.throw(
'Cannot use more than 19 amounts, but got 20',
);
- expect(() => slpSend('1'.repeat(64), 1, [-1])).to.throw(
+ expect(() => slpSend('1'.repeat(64), 1, [-1n])).to.throw(
'Amount out of range: -1',
);
expect(() =>
slpSend('1'.repeat(64), 1, [0x10000000000000000n]),
).to.throw('Amount out of range: 18446744073709551616');
- expect(() => slpBurn('', 55, 0)).to.throw('Unknown token type 55');
- expect(() => slpBurn('', 1, 0)).to.throw(
+ expect(() => slpBurn('', 55, 0n)).to.throw('Unknown token type 55');
+ expect(() => slpBurn('', 1, 0n)).to.throw(
'Token ID must be 64 hex characters in length, but got 0',
);
- expect(() => slpBurn('1'.repeat(64), 1, -1)).to.throw(
+ expect(() => slpBurn('1'.repeat(64), 1, -1n)).to.throw(
'Amount out of range: -1',
);
expect(() => slpBurn('1'.repeat(64), 1, 0x10000000000000000n)).to.throw(
diff --git a/modules/ecash-lib/src/token/slp.ts b/modules/ecash-lib/src/token/slp.ts
--- a/modules/ecash-lib/src/token/slp.ts
+++ b/modules/ecash-lib/src/token/slp.ts
@@ -7,7 +7,7 @@
import { Op, pushBytesOp } from '../op.js';
import { OP_PUSHDATA1, OP_RETURN } from '../opcode.js';
import { Script } from '../script.js';
-import { Amount, BURN, GENESIS, GenesisInfo, MINT, SEND } from './common.js';
+import { Atoms, BURN, GENESIS, GenesisInfo, MINT, SEND } from './common.js';
/** LOKAD ID for SLP */
export const SLP_LOKAD_ID = strToBytes('SLP\0');
@@ -25,7 +25,7 @@
export function slpGenesis(
tokenType: number,
genesisInfo: GenesisInfo,
- initialQuantity: Amount,
+ initialQuantity: Atoms,
mintBatonOutIdx?: number,
): Script {
verifyTokenType(tokenType);
@@ -64,7 +64,7 @@
export function slpMint(
tokenId: string,
tokenType: number,
- additionalQuantity: Amount,
+ additionalQuantity: Atoms,
mintBatonOutIdx?: number,
): Script {
verifyTokenType(tokenType);
@@ -90,7 +90,7 @@
**/
export function slpMintVault(
tokenId: string,
- additionalQuantities: Amount[],
+ additionalQuantities: Atoms[],
): Script {
verifyTokenId(tokenId);
verifySendAmounts(additionalQuantities);
@@ -113,7 +113,7 @@
export function slpSend(
tokenId: string,
tokenType: number,
- sendAmounts: Amount[],
+ sendAmounts: Atoms[],
): Script {
verifyTokenType(tokenType);
verifyTokenId(tokenId);
@@ -136,7 +136,7 @@
export function slpBurn(
tokenId: string,
tokenType: number,
- burnAmount: Amount,
+ burnAmount: Atoms,
): Script {
verifyTokenType(tokenType);
verifyTokenId(tokenId);
@@ -170,7 +170,7 @@
}
}
-function verifySendAmounts(sendAmounts: Amount[]) {
+function verifySendAmounts(sendAmounts: Atoms[]) {
if (sendAmounts.length == 0) {
throw new Error('Send amount cannot be empty');
}
@@ -197,7 +197,7 @@
return pushBytesOp(pushdata);
}
-export function slpAmount(amount: Amount): Uint8Array {
+export function slpAmount(amount: Atoms): Uint8Array {
if (amount < 0 || BigInt(amount) > 0xffffffffffffffffn) {
throw new Error(`Amount out of range: ${amount}`);
}
diff --git a/modules/ecash-lib/src/tx.test.ts b/modules/ecash-lib/src/tx.test.ts
--- a/modules/ecash-lib/src/tx.test.ts
+++ b/modules/ecash-lib/src/tx.test.ts
@@ -80,15 +80,15 @@
],
outputs: [
{
- value: 0x2134,
+ sats: 0x2134n,
script: new Script(fromHex('1133557799')),
},
{
- value: 0x8079685746352413n,
+ sats: 0x8079685746352413n,
script: new Script(fromHex('564738291092837465')),
},
{
- value: 0,
+ sats: 0n,
script: new Script(fromHex('6a68656c6c6f')),
},
],
diff --git a/modules/ecash-lib/src/tx.ts b/modules/ecash-lib/src/tx.ts
--- a/modules/ecash-lib/src/tx.ts
+++ b/modules/ecash-lib/src/tx.ts
@@ -49,7 +49,7 @@
/** CTxOut, creating a new output. */
export interface TxOutput {
/** Value in satoshis of the output (1 XEC = 100 satoshis) */
- value: number | bigint;
+ sats: bigint;
/** Script locking the output */
script: Script;
}
@@ -57,7 +57,7 @@
/** All the data required to sign an input (using BIP143). */
export interface SignData {
/** Value of the output being spent */
- value: number | bigint;
+ sats: bigint;
/** Script of the output being spent (not for P2SH) */
outputScript?: Script;
/**
@@ -120,10 +120,10 @@
}
export function readTxOutput(bytes: Bytes): TxOutput {
- const value = bytes.readU64();
+ const sats = bytes.readU64();
const script = Script.readWithSize(bytes);
return {
- value,
+ sats,
script,
};
}
@@ -147,7 +147,7 @@
/** Write a TxOutput to a Writer */
export function writeTxOutput(output: TxOutput, writer: Writer): void {
- writer.putU64(output.value);
+ writer.putU64(output.sats);
output.script.writeWithSize(writer);
}
@@ -164,7 +164,7 @@
script: input.script?.copy(),
sequence: input.sequence,
signData: input.signData && {
- value: input.signData.value,
+ sats: input.signData.sats,
outputScript: input.signData.outputScript?.copy(),
redeemScript: input.signData.redeemScript?.copy(),
},
@@ -174,7 +174,7 @@
/** Create a deep copy of the TxOutput */
export function copyTxOutput(output: TxOutput): TxOutput {
return {
- value: output.value,
+ sats: output.sats,
script: output.script.copy(),
};
}
diff --git a/modules/ecash-lib/src/txBuilder.ts b/modules/ecash-lib/src/txBuilder.ts
--- a/modules/ecash-lib/src/txBuilder.ts
+++ b/modules/ecash-lib/src/txBuilder.ts
@@ -84,7 +84,7 @@
if (input.input.signData === undefined) {
return undefined;
}
- inputSum += BigInt(input.input.signData.value);
+ inputSum += BigInt(input.input.signData.sats);
}
return inputSum;
}
@@ -108,11 +108,11 @@
}
leftoverIdx = idx;
outputs[idx] = {
- value: 0, // placeholder
+ sats: 0n, // placeholder
script: builderOutput.copy(),
};
} else {
- fixedOutputSum += BigInt(builderOutput.value);
+ fixedOutputSum += BigInt(builderOutput.sats);
outputs[idx] = copyTxOutput(builderOutput);
}
}
@@ -123,7 +123,7 @@
public sign(params?: {
ecc?: Ecc;
feePerKb?: number;
- dustLimit?: number;
+ dustLimit?: bigint;
}): Tx {
const ecc = params?.ecc ?? new Ecc();
const { fixedOutputSum, leftoverIdx, outputs } = this.prepareOutputs();
@@ -147,7 +147,7 @@
const inputSum = this.inputSum();
if (inputSum === undefined) {
throw new Error(
- 'Using a leftover output requires setting SignData.value for all inputs',
+ 'Using a leftover output requires setting SignData.sats for all inputs',
);
}
if (params?.feePerKb === undefined) {
@@ -175,8 +175,8 @@
updateSignatories(new EccDummy(), dummyUnsignedTx);
let txSize = dummyUnsignedTx.tx.serSize();
let txFee = calcTxFee(txSize, params.feePerKb);
- const leftoverValue = inputSum - (fixedOutputSum + txFee);
- if (leftoverValue < params.dustLimit) {
+ const leftoverSats = inputSum - (fixedOutputSum + txFee);
+ if (leftoverSats < params.dustLimit) {
// inputs cannot pay for a dust leftover -> remove & recalc
outputs.splice(leftoverIdx, 1);
dummyUnsignedTx.tx.outputs = outputs;
@@ -185,11 +185,11 @@
txSize = dummyUnsignedTx.tx.serSize();
txFee = calcTxFee(txSize, params.feePerKb);
} else {
- outputs[leftoverIdx].value = leftoverValue;
+ outputs[leftoverIdx].sats = leftoverSats;
}
if (inputSum < fixedOutputSum + txFee) {
throw new Error(
- `Insufficient input value (${inputSum}): Can only pay for ${
+ `Insufficient input sats (${inputSum}): Can only pay for ${
inputSum - fixedOutputSum
} fees, but ${txFee} required`,
);
diff --git a/modules/ecash-lib/src/unsignedTx.test.ts b/modules/ecash-lib/src/unsignedTx.test.ts
--- a/modules/ecash-lib/src/unsignedTx.test.ts
+++ b/modules/ecash-lib/src/unsignedTx.test.ts
@@ -40,7 +40,7 @@
script: new Script(),
sequence: 0x87654321,
signData: {
- value: 0x123456789,
+ sats: 0x123456789n,
outputScript: new Script(fromHex('abacadaeafb0abac')),
},
},
@@ -52,22 +52,22 @@
script: new Script(),
sequence: 0x10605,
signData: {
- value: 0x9876,
+ sats: 0x9876n,
redeemScript: new Script(fromHex('ab778899ac55')),
},
},
],
outputs: [
{
- value: 0x2134,
+ sats: 0x2134n,
script: new Script(fromHex('1133557799')),
},
{
- value: 0x8079685746352413n,
+ sats: 0x8079685746352413n,
script: new Script(fromHex('564738291092837465')),
},
{
- value: 0,
+ sats: 0n,
script: new Script(fromHex('6a68656c6c6f')),
},
],
@@ -801,7 +801,7 @@
prevOut: TX.inputs[0].prevOut,
script: new Script(),
sequence: 0,
- signData: { value: 0 },
+ signData: { sats: 0n },
},
],
}),
@@ -819,7 +819,7 @@
script: new Script(),
sequence: 0,
signData: {
- value: 0,
+ sats: 0n,
outputScript: Script.p2sh(new Uint8Array(20)),
},
},
diff --git a/modules/ecash-lib/src/unsignedTx.ts b/modules/ecash-lib/src/unsignedTx.ts
--- a/modules/ecash-lib/src/unsignedTx.ts
+++ b/modules/ecash-lib/src/unsignedTx.ts
@@ -146,7 +146,7 @@
idx != inputIdx
) {
// Do not lock-in the txout payee at other indices as txin
- writeTxOutput({ value: 0, script: new Script() }, writer);
+ writeTxOutput({ sats: 0n, script: new Script() }, writer);
} else {
writeTxOutput(tx.outputs[idx], writer);
}
@@ -288,7 +288,7 @@
}
writeOutPoint(input.prevOut, writer);
scriptCode.writeWithSize(writer);
- writer.putU64(signData.value);
+ writer.putU64(signData.sats);
writer.putU32(input.sequence ?? DEFAULT_SEQUENCE);
writer.putBytes(hashOutputs);
writer.putU32(tx.locktime);
diff --git a/modules/ecash-lib/tests/alp.test.ts b/modules/ecash-lib/tests/alp.test.ts
--- a/modules/ecash-lib/tests/alp.test.ts
+++ b/modules/ecash-lib/tests/alp.test.ts
@@ -23,7 +23,7 @@
import '../src/initNodeJs.js';
const NUM_COINS = 500;
-const COIN_VALUE = 100000;
+const COIN_VALUE = 100000n;
const ALP_TOKEN_TYPE_STANDARD = {
number: 0,
@@ -67,7 +67,7 @@
const pkh4 = shaRmd160(pk4);
const p2pkh4 = Script.p2pkh(pkh4);
- await runner.sendToScript(50000, p2pkh1);
+ await runner.sendToScript(50000n, p2pkh1);
const utxos = await chronik.script('p2pkh', toHex(pkh1)).utxos();
expect(utxos.utxos.length).to.equal(1);
@@ -79,7 +79,7 @@
input: {
prevOut: utxo.outpoint,
signData: {
- value: utxo.value,
+ sats: utxo.sats,
outputScript: p2pkh1,
},
},
@@ -88,7 +88,7 @@
],
outputs: [
{
- value: 0,
+ sats: 0n,
script: emppScript([
alpGenesis(
ALP_STANDARD,
@@ -101,15 +101,15 @@
decimals: 4,
},
{
- amounts: [2000, 2],
+ amounts: [2000n, 2n],
numBatons: 1,
},
),
]),
},
- { value: 10000, script: p2pkh2 },
- { value: 10000, script: p2pkh1 },
- { value: 10000, script: p2pkh1 },
+ { sats: 10000n, script: p2pkh2 },
+ { sats: 10000n, script: p2pkh1 },
+ { sats: 10000n, script: p2pkh1 },
],
});
const genesisTx = txBuildGenesis.sign();
@@ -139,10 +139,10 @@
},
blockHeight: -1,
isCoinbase: false,
- value: 10000,
+ sats: 10000n,
isFinal: false,
token: {
- amount: '2000',
+ atoms: 2000n,
isMintBaton: false,
tokenId: tokenId,
tokenType: ALP_TOKEN_TYPE_STANDARD,
@@ -159,7 +159,7 @@
outIdx: 3,
},
signData: {
- value: 10000,
+ sats: 10000n,
outputScript: p2pkh1,
},
},
@@ -168,16 +168,16 @@
],
outputs: [
{
- value: 0,
+ sats: 0n,
script: emppScript([
alpMint(tokenId, ALP_STANDARD, {
- amounts: [500],
+ amounts: [500n],
numBatons: 1,
}),
]),
},
- { value: 546, script: p2pkh1 },
- { value: 546, script: p2pkh3 },
+ { sats: 546n, script: p2pkh1 },
+ { sats: 546n, script: p2pkh3 },
],
});
const mintTx = txBuildMint.sign();
@@ -192,10 +192,10 @@
},
blockHeight: -1,
isCoinbase: false,
- value: 546,
+ sats: 546n,
isFinal: false,
token: {
- amount: '0',
+ atoms: 0n,
isMintBaton: true,
tokenId: tokenId,
tokenType: ALP_TOKEN_TYPE_STANDARD,
@@ -212,7 +212,7 @@
outIdx: 1,
},
signData: {
- value: 546,
+ sats: 546n,
outputScript: p2pkh1,
},
},
@@ -225,7 +225,7 @@
outIdx: 1,
},
signData: {
- value: 10000,
+ sats: 10000n,
outputScript: p2pkh2,
},
},
@@ -234,25 +234,25 @@
],
outputs: [
{
- value: 0,
+ sats: 0n,
script: emppScript([
alpGenesis(
ALP_STANDARD,
{},
{
- amounts: [100, 0],
+ amounts: [100n, 0n],
numBatons: 1,
},
),
// OK to push 01 (not encoded as OP_1)
fromHex('01'),
- alpSend(tokenId, ALP_STANDARD, [0, 1000, 0, 1500]),
+ alpSend(tokenId, ALP_STANDARD, [0n, 1000n, 0n, 1500n]),
]),
},
- { value: 546, script: p2pkh1 },
- { value: 546, script: p2pkh2 },
- { value: 546, script: p2pkh3 },
- { value: 546, script: p2pkh4 },
+ { sats: 546n, script: p2pkh1 },
+ { sats: 546n, script: p2pkh2 },
+ { sats: 546n, script: p2pkh3 },
+ { sats: 546n, script: p2pkh4 },
],
});
const multiTx = txBuildMulti.sign();
@@ -269,13 +269,13 @@
prevOut: multiTx.inputs[0].prevOut,
sequenceNo: 0xffffffff,
token: {
- amount: '500',
+ atoms: 500n,
entryIdx: 1,
isMintBaton: false,
tokenId: tokenId,
tokenType: ALP_TOKEN_TYPE_STANDARD,
},
- value: 546,
+ sats: 546n,
},
{
inputScript: toHex(multiTx.inputs[1].script!.bytecode),
@@ -283,63 +283,63 @@
prevOut: multiTx.inputs[1].prevOut,
sequenceNo: 0xffffffff,
token: {
- amount: '2000',
+ atoms: 2000n,
entryIdx: 1,
isMintBaton: false,
tokenId: tokenId,
tokenType: ALP_TOKEN_TYPE_STANDARD,
},
- value: 10000,
+ sats: 10000n,
},
],
outputs: [
{
- value: 0,
+ sats: 0n,
outputScript: toHex(multiTx.outputs[0].script.bytecode),
},
{
outputScript: toHex(p2pkh1.bytecode),
token: {
- amount: '100',
+ atoms: 100n,
entryIdx: 0,
isMintBaton: false,
tokenId: multiTxid,
tokenType: ALP_TOKEN_TYPE_STANDARD,
},
- value: 546,
+ sats: 546n,
},
{
outputScript: toHex(p2pkh2.bytecode),
token: {
- amount: '1000',
+ atoms: 1000n,
entryIdx: 1,
isMintBaton: false,
tokenId: tokenId,
tokenType: ALP_TOKEN_TYPE_STANDARD,
},
- value: 546,
+ sats: 546n,
},
{
outputScript: toHex(p2pkh3.bytecode),
token: {
- amount: '0',
+ atoms: 0n,
entryIdx: 0,
isMintBaton: true,
tokenId: multiTxid,
tokenType: ALP_TOKEN_TYPE_STANDARD,
},
- value: 546,
+ sats: 546n,
},
{
outputScript: toHex(p2pkh4.bytecode),
token: {
- amount: '1500',
+ atoms: 1500n,
entryIdx: 1,
isMintBaton: false,
tokenId: tokenId,
tokenType: ALP_TOKEN_TYPE_STANDARD,
},
- value: 546,
+ sats: 546n,
},
],
lockTime: 0,
@@ -349,22 +349,22 @@
isFinal: false,
tokenEntries: [
{
- actualBurnAmount: '0',
+ actualBurnAtoms: 0n,
burnSummary: '',
burnsMintBatons: false,
failedColorings: [],
- intentionalBurn: '0',
+ intentionalBurnAtoms: 0n,
isInvalid: false,
tokenId: multiTxid,
tokenType: ALP_TOKEN_TYPE_STANDARD,
txType: 'GENESIS',
},
{
- actualBurnAmount: '0',
+ actualBurnAtoms: 0n,
burnSummary: '',
burnsMintBatons: false,
failedColorings: [],
- intentionalBurn: '0',
+ intentionalBurnAtoms: 0n,
isInvalid: false,
tokenId: tokenId,
tokenType: ALP_TOKEN_TYPE_STANDARD,
diff --git a/modules/ecash-lib/tests/slp.test.ts b/modules/ecash-lib/tests/slp.test.ts
--- a/modules/ecash-lib/tests/slp.test.ts
+++ b/modules/ecash-lib/tests/slp.test.ts
@@ -29,7 +29,7 @@
import '../src/initNodeJs.js';
const NUM_COINS = 500;
-const COIN_VALUE = 100000;
+const COIN_VALUE = 100000n;
const SLP_TOKEN_TYPE_FUNGIBLE = {
number: 1,
@@ -91,7 +91,7 @@
const pkh4 = shaRmd160(pk4);
const p2pkh4 = Script.p2pkh(pkh4);
- await runner.sendToScript(50000, p2pkh1);
+ await runner.sendToScript(50000n, p2pkh1);
const utxos = await chronik.script('p2pkh', toHex(pkh1)).utxos();
expect(utxos.utxos.length).to.equal(1);
@@ -103,7 +103,7 @@
input: {
prevOut: utxo.outpoint,
signData: {
- value: utxo.value,
+ sats: utxo.sats,
outputScript: p2pkh1,
},
},
@@ -112,7 +112,7 @@
],
outputs: [
{
- value: 0,
+ sats: 0n,
script: slpGenesis(
SLP_FUNGIBLE,
{
@@ -122,12 +122,12 @@
hash: '0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef',
decimals: 4,
},
- 2000,
+ 2000n,
2,
),
},
- { value: 10000, script: p2pkh2 },
- { value: 10000, script: p2pkh1 },
+ { sats: 10000n, script: p2pkh2 },
+ { sats: 10000n, script: p2pkh1 },
],
});
const genesisTx = txBuildGenesis.sign();
@@ -156,10 +156,10 @@
},
blockHeight: -1,
isCoinbase: false,
- value: 10000,
+ sats: 10000n,
isFinal: false,
token: {
- amount: '2000',
+ atoms: 2000n,
isMintBaton: false,
tokenId: tokenId,
tokenType: SLP_TOKEN_TYPE_FUNGIBLE,
@@ -176,7 +176,7 @@
outIdx: 2,
},
signData: {
- value: 10000,
+ sats: 10000n,
outputScript: p2pkh1,
},
},
@@ -185,11 +185,11 @@
],
outputs: [
{
- value: 0,
- script: slpMint(tokenId, SLP_FUNGIBLE, 500, 2),
+ sats: 0n,
+ script: slpMint(tokenId, SLP_FUNGIBLE, 500n, 2),
},
- { value: 546, script: p2pkh1 },
- { value: 546, script: p2pkh3 },
+ { sats: 546n, script: p2pkh1 },
+ { sats: 546n, script: p2pkh3 },
],
});
const mintTx = txBuildMint.sign();
@@ -204,10 +204,10 @@
},
blockHeight: -1,
isCoinbase: false,
- value: 546,
+ sats: 546n,
isFinal: false,
token: {
- amount: '0',
+ atoms: 0n,
isMintBaton: true,
tokenId: tokenId,
tokenType: SLP_TOKEN_TYPE_FUNGIBLE,
@@ -224,7 +224,7 @@
outIdx: 1,
},
signData: {
- value: 546,
+ sats: 546n,
outputScript: p2pkh1,
},
},
@@ -237,7 +237,7 @@
outIdx: 1,
},
signData: {
- value: 10000,
+ sats: 10000n,
outputScript: p2pkh2,
},
},
@@ -246,11 +246,11 @@
],
outputs: [
{
- value: 0,
- script: slpSend(tokenId, SLP_FUNGIBLE, [1000, 1500]),
+ sats: 0n,
+ script: slpSend(tokenId, SLP_FUNGIBLE, [1000n, 1500n]),
},
- { value: 546, script: p2pkh2 },
- { value: 546, script: p2pkh4 },
+ { sats: 546n, script: p2pkh2 },
+ { sats: 546n, script: p2pkh4 },
],
});
const sendTx = txBuildSend.sign();
@@ -267,13 +267,13 @@
prevOut: sendTx.inputs[0].prevOut,
sequenceNo: 0xffffffff,
token: {
- amount: '500',
+ atoms: 500n,
entryIdx: 0,
isMintBaton: false,
tokenId: tokenId,
tokenType: SLP_TOKEN_TYPE_FUNGIBLE,
},
- value: 546,
+ sats: 546n,
},
{
inputScript: toHex(sendTx.inputs[1].script!.bytecode),
@@ -281,41 +281,41 @@
prevOut: sendTx.inputs[1].prevOut,
sequenceNo: 0xffffffff,
token: {
- amount: '2000',
+ atoms: 2000n,
entryIdx: 0,
isMintBaton: false,
tokenId: tokenId,
tokenType: SLP_TOKEN_TYPE_FUNGIBLE,
},
- value: 10000,
+ sats: 10000n,
},
],
outputs: [
{
- value: 0,
+ sats: 0n,
outputScript: toHex(sendTx.outputs[0].script.bytecode),
},
{
outputScript: toHex(p2pkh2.bytecode),
token: {
- amount: '1000',
+ atoms: 1000n,
entryIdx: 0,
isMintBaton: false,
tokenId: tokenId,
tokenType: SLP_TOKEN_TYPE_FUNGIBLE,
},
- value: 546,
+ sats: 546n,
},
{
outputScript: toHex(p2pkh4.bytecode),
token: {
- amount: '1500',
+ atoms: 1500n,
entryIdx: 0,
isMintBaton: false,
tokenId: tokenId,
tokenType: SLP_TOKEN_TYPE_FUNGIBLE,
},
- value: 546,
+ sats: 546n,
},
],
lockTime: 0,
@@ -325,11 +325,11 @@
isFinal: false,
tokenEntries: [
{
- actualBurnAmount: '0',
+ actualBurnAtoms: 0n,
burnSummary: '',
burnsMintBatons: false,
failedColorings: [],
- intentionalBurn: '0',
+ intentionalBurnAtoms: 0n,
isInvalid: false,
tokenId: tokenId,
tokenType: SLP_TOKEN_TYPE_FUNGIBLE,
@@ -349,7 +349,7 @@
outIdx: 1,
},
signData: {
- value: 546,
+ sats: 546n,
outputScript: p2pkh2,
},
},
@@ -358,8 +358,8 @@
],
outputs: [
{
- value: 0,
- script: slpBurn(tokenId, SLP_FUNGIBLE, 1000),
+ sats: 0n,
+ script: slpBurn(tokenId, SLP_FUNGIBLE, 1000n),
},
],
});
@@ -376,18 +376,18 @@
prevOut: burnTx.inputs[0].prevOut,
sequenceNo: 0xffffffff,
token: {
- amount: '1000',
+ atoms: 1000n,
entryIdx: 0,
isMintBaton: false,
tokenId: tokenId,
tokenType: SLP_TOKEN_TYPE_FUNGIBLE,
},
- value: 546,
+ sats: 546n,
},
],
outputs: [
{
- value: 0,
+ sats: 0n,
outputScript: toHex(burnTx.outputs[0].script.bytecode),
},
],
@@ -398,11 +398,11 @@
isFinal: false,
tokenEntries: [
{
- actualBurnAmount: '1000',
+ actualBurnAtoms: 1000n,
burnSummary: '',
burnsMintBatons: false,
failedColorings: [],
- intentionalBurn: '1000',
+ intentionalBurnAtoms: 1000n,
isInvalid: false,
tokenId: tokenId,
tokenType: SLP_TOKEN_TYPE_FUNGIBLE,
@@ -450,7 +450,7 @@
await ws.waitForOpen();
ws.subscribeToBlocks();
- await runner.sendToScript(50000, p2pkh1);
+ await runner.sendToScript(50000n, p2pkh1);
const utxos = await chronik.script('p2pkh', toHex(pkh1)).utxos();
expect(utxos.utxos.length).to.equal(1);
@@ -462,7 +462,7 @@
input: {
prevOut: utxo.outpoint,
signData: {
- value: utxo.value,
+ sats: utxo.sats,
outputScript: p2pkh1,
},
},
@@ -471,7 +471,7 @@
],
outputs: [
{
- value: 0,
+ sats: 0n,
script: slpGenesis(
SLP_MINT_VAULT,
{
@@ -482,10 +482,10 @@
mintVaultScripthash: toHex(mintVaultScripthash),
decimals: 4,
},
- 2000,
+ 2000n,
),
},
- { value: 10000, script: p2pkh2 },
+ { sats: 10000n, script: p2pkh2 },
],
});
const genesisTx = txBuildGenesis.sign();
@@ -515,10 +515,10 @@
},
blockHeight: -1,
isCoinbase: false,
- value: 10000,
+ sats: 10000n,
isFinal: false,
token: {
- amount: '2000',
+ atoms: 2000n,
isMintBaton: false,
tokenId: tokenId,
tokenType: SLP_TOKEN_TYPE_MINT_VAULT,
@@ -530,7 +530,7 @@
runner.generate();
await once(wsBlocks, 'BLK_CONNECTED');
- const mintVaultTxid = await runner.sendToScript(50000, mintVaultP2sh);
+ const mintVaultTxid = await runner.sendToScript(50000n, mintVaultP2sh);
const txBuildMint = new TxBuilder({
inputs: [
@@ -548,11 +548,11 @@
],
outputs: [
{
- value: 0,
- script: slpMintVault(tokenId, [500, 600]),
+ sats: 0n,
+ script: slpMintVault(tokenId, [500n, 600n]),
},
- { value: 546, script: p2pkh1 },
- { value: 546, script: p2pkh3 },
+ { sats: 546n, script: p2pkh1 },
+ { sats: 546n, script: p2pkh3 },
],
});
const mintTx = txBuildMint.sign();
@@ -567,10 +567,10 @@
},
blockHeight: -1,
isCoinbase: false,
- value: 546,
+ sats: 546n,
isFinal: false,
token: {
- amount: '600',
+ atoms: 600n,
isMintBaton: false,
tokenId: tokenId,
tokenType: SLP_TOKEN_TYPE_MINT_VAULT,
@@ -587,7 +587,7 @@
outIdx: 1,
},
signData: {
- value: 546,
+ sats: 546n,
outputScript: p2pkh1,
},
},
@@ -600,7 +600,7 @@
outIdx: 1,
},
signData: {
- value: 10000,
+ sats: 10000n,
outputScript: p2pkh2,
},
},
@@ -609,11 +609,11 @@
],
outputs: [
{
- value: 0,
- script: slpSend(tokenId, SLP_MINT_VAULT, [1000, 1500]),
+ sats: 0n,
+ script: slpSend(tokenId, SLP_MINT_VAULT, [1000n, 1500n]),
},
- { value: 546, script: p2pkh2 },
- { value: 546, script: p2pkh4 },
+ { sats: 546n, script: p2pkh2 },
+ { sats: 546n, script: p2pkh4 },
],
});
const sendTx = txBuildSend.sign();
@@ -630,13 +630,13 @@
prevOut: sendTx.inputs[0].prevOut,
sequenceNo: 0xffffffff,
token: {
- amount: '500',
+ atoms: 500n,
entryIdx: 0,
isMintBaton: false,
tokenId: tokenId,
tokenType: SLP_TOKEN_TYPE_MINT_VAULT,
},
- value: 546,
+ sats: 546n,
},
{
inputScript: toHex(sendTx.inputs[1].script!.bytecode),
@@ -644,41 +644,41 @@
prevOut: sendTx.inputs[1].prevOut,
sequenceNo: 0xffffffff,
token: {
- amount: '2000',
+ atoms: 2000n,
entryIdx: 0,
isMintBaton: false,
tokenId: tokenId,
tokenType: SLP_TOKEN_TYPE_MINT_VAULT,
},
- value: 10000,
+ sats: 10000n,
},
],
outputs: [
{
- value: 0,
+ sats: 0n,
outputScript: toHex(sendTx.outputs[0].script.bytecode),
},
{
outputScript: toHex(p2pkh2.bytecode),
token: {
- amount: '1000',
+ atoms: 1000n,
entryIdx: 0,
isMintBaton: false,
tokenId: tokenId,
tokenType: SLP_TOKEN_TYPE_MINT_VAULT,
},
- value: 546,
+ sats: 546n,
},
{
outputScript: toHex(p2pkh4.bytecode),
token: {
- amount: '1500',
+ atoms: 1500n,
entryIdx: 0,
isMintBaton: false,
tokenId: tokenId,
tokenType: SLP_TOKEN_TYPE_MINT_VAULT,
},
- value: 546,
+ sats: 546n,
},
],
lockTime: 0,
@@ -688,11 +688,11 @@
isFinal: false,
tokenEntries: [
{
- actualBurnAmount: '0',
+ actualBurnAtoms: 0n,
burnSummary: '',
burnsMintBatons: false,
failedColorings: [],
- intentionalBurn: '0',
+ intentionalBurnAtoms: 0n,
isInvalid: false,
tokenId: tokenId,
tokenType: SLP_TOKEN_TYPE_MINT_VAULT,
@@ -725,7 +725,7 @@
const pkh4 = shaRmd160(pk4);
const p2pkh4 = Script.p2pkh(pkh4);
- await runner.sendToScript(50000, p2pkh1);
+ await runner.sendToScript(50000n, p2pkh1);
const utxos = await chronik.script('p2pkh', toHex(pkh1)).utxos();
expect(utxos.utxos.length).to.equal(1);
@@ -737,7 +737,7 @@
input: {
prevOut: utxo.outpoint,
signData: {
- value: utxo.value,
+ sats: utxo.sats,
outputScript: p2pkh1,
},
},
@@ -746,7 +746,7 @@
],
outputs: [
{
- value: 0,
+ sats: 0n,
script: slpGenesis(
SLP_NFT1_GROUP,
{
@@ -756,12 +756,12 @@
hash: '000111222333444555666777888999aaabbbcccdddeeefff0001112223334444',
decimals: 4,
},
- 2000,
+ 2000n,
2,
),
},
- { value: 10000, script: p2pkh2 },
- { value: 10000, script: p2pkh1 },
+ { sats: 10000n, script: p2pkh2 },
+ { sats: 10000n, script: p2pkh1 },
],
});
const genesisTx = txBuildGenesisGroup.sign();
@@ -790,10 +790,10 @@
},
blockHeight: -1,
isCoinbase: false,
- value: 10000,
+ sats: 10000n,
isFinal: false,
token: {
- amount: '2000',
+ atoms: 2000n,
isMintBaton: false,
tokenId: tokenId,
tokenType: SLP_TOKEN_TYPE_NFT1_GROUP,
@@ -810,7 +810,7 @@
outIdx: 2,
},
signData: {
- value: 10000,
+ sats: 10000n,
outputScript: p2pkh1,
},
},
@@ -819,11 +819,11 @@
],
outputs: [
{
- value: 0,
- script: slpMint(tokenId, SLP_NFT1_GROUP, 500, 2),
+ sats: 0n,
+ script: slpMint(tokenId, SLP_NFT1_GROUP, 500n, 2),
},
- { value: 546, script: p2pkh1 },
- { value: 546, script: p2pkh3 },
+ { sats: 546n, script: p2pkh1 },
+ { sats: 546n, script: p2pkh3 },
],
});
const mintTx = txBuildMint.sign();
@@ -838,10 +838,10 @@
},
blockHeight: -1,
isCoinbase: false,
- value: 546,
+ sats: 546n,
isFinal: false,
token: {
- amount: '0',
+ atoms: 0n,
isMintBaton: true,
tokenId: tokenId,
tokenType: SLP_TOKEN_TYPE_NFT1_GROUP,
@@ -858,7 +858,7 @@
outIdx: 1,
},
signData: {
- value: 546,
+ sats: 546n,
outputScript: p2pkh1,
},
},
@@ -871,7 +871,7 @@
outIdx: 1,
},
signData: {
- value: 10000,
+ sats: 10000n,
outputScript: p2pkh2,
},
},
@@ -880,11 +880,11 @@
],
outputs: [
{
- value: 0,
- script: slpSend(tokenId, SLP_NFT1_GROUP, [1, 2499]),
+ sats: 0n,
+ script: slpSend(tokenId, SLP_NFT1_GROUP, [1n, 2499n]),
},
- { value: 8000, script: p2pkh2 },
- { value: 546, script: p2pkh4 },
+ { sats: 8000n, script: p2pkh2 },
+ { sats: 546n, script: p2pkh4 },
],
});
const sendTx = txBuildSend.sign();
@@ -901,13 +901,13 @@
prevOut: sendTx.inputs[0].prevOut,
sequenceNo: 0xffffffff,
token: {
- amount: '500',
+ atoms: 500n,
entryIdx: 0,
isMintBaton: false,
tokenId: tokenId,
tokenType: SLP_TOKEN_TYPE_NFT1_GROUP,
},
- value: 546,
+ sats: 546n,
},
{
inputScript: toHex(sendTx.inputs[1].script!.bytecode),
@@ -915,41 +915,41 @@
prevOut: sendTx.inputs[1].prevOut,
sequenceNo: 0xffffffff,
token: {
- amount: '2000',
+ atoms: 2000n,
entryIdx: 0,
isMintBaton: false,
tokenId: tokenId,
tokenType: SLP_TOKEN_TYPE_NFT1_GROUP,
},
- value: 10000,
+ sats: 10000n,
},
],
outputs: [
{
- value: 0,
+ sats: 0n,
outputScript: toHex(sendTx.outputs[0].script.bytecode),
},
{
outputScript: toHex(p2pkh2.bytecode),
token: {
- amount: '1',
+ atoms: 1n,
entryIdx: 0,
isMintBaton: false,
tokenId: tokenId,
tokenType: SLP_TOKEN_TYPE_NFT1_GROUP,
},
- value: 8000,
+ sats: 8000n,
},
{
outputScript: toHex(p2pkh4.bytecode),
token: {
- amount: '2499',
+ atoms: 2499n,
entryIdx: 0,
isMintBaton: false,
tokenId: tokenId,
tokenType: SLP_TOKEN_TYPE_NFT1_GROUP,
},
- value: 546,
+ sats: 546n,
},
],
lockTime: 0,
@@ -959,11 +959,11 @@
isFinal: false,
tokenEntries: [
{
- actualBurnAmount: '0',
+ actualBurnAtoms: 0n,
burnSummary: '',
burnsMintBatons: false,
failedColorings: [],
- intentionalBurn: '0',
+ intentionalBurnAtoms: 0n,
isInvalid: false,
tokenId: tokenId,
tokenType: SLP_TOKEN_TYPE_NFT1_GROUP,
@@ -983,7 +983,7 @@
outIdx: 1,
},
signData: {
- value: 8000,
+ sats: 8000n,
outputScript: p2pkh2,
},
},
@@ -992,7 +992,7 @@
],
outputs: [
{
- value: 0,
+ sats: 0n,
script: slpGenesis(
SLP_NFT1_CHILD,
{
@@ -1002,10 +1002,10 @@
hash: '0000111122223333444455556666777788889999aaaabbbbccccddddeeeeffff',
decimals: 0,
},
- 1,
+ 1n,
),
},
- { value: 6000, script: p2pkh1 },
+ { sats: 6000n, script: p2pkh1 },
],
});
const genesisChildTx = txBuildGenesisChild.sign();
@@ -1036,7 +1036,7 @@
outIdx: 1,
},
signData: {
- value: 6000,
+ sats: 6000n,
outputScript: p2pkh1,
},
},
@@ -1045,11 +1045,11 @@
],
outputs: [
{
- value: 0,
- script: slpSend(childTokenId, SLP_NFT1_CHILD, [0, 1]),
+ sats: 0n,
+ script: slpSend(childTokenId, SLP_NFT1_CHILD, [0n, 1n]),
},
- { value: 546, script: p2pkh2 },
- { value: 546, script: p2pkh4 },
+ { sats: 546n, script: p2pkh2 },
+ { sats: 546n, script: p2pkh4 },
],
});
const childSendTx = txBuildChildSend.sign();
@@ -1067,34 +1067,34 @@
prevOut: childSendTx.inputs[0].prevOut,
sequenceNo: 0xffffffff,
token: {
- amount: '1',
+ atoms: 1n,
entryIdx: 0,
isMintBaton: false,
tokenId: childTokenId,
tokenType: SLP_TOKEN_TYPE_NFT1_CHILD,
},
- value: 6000,
+ sats: 6000n,
},
],
outputs: [
{
- value: 0,
+ sats: 0n,
outputScript: toHex(childSendTx.outputs[0].script.bytecode),
},
{
outputScript: toHex(p2pkh2.bytecode),
- value: 546,
+ sats: 546n,
},
{
outputScript: toHex(p2pkh4.bytecode),
token: {
- amount: '1',
+ atoms: 1n,
entryIdx: 0,
isMintBaton: false,
tokenId: childTokenId,
tokenType: SLP_TOKEN_TYPE_NFT1_CHILD,
},
- value: 546,
+ sats: 546n,
},
],
lockTime: 0,
@@ -1104,11 +1104,11 @@
isFinal: false,
tokenEntries: [
{
- actualBurnAmount: '0',
+ actualBurnAtoms: 0n,
burnSummary: '',
burnsMintBatons: false,
failedColorings: [],
- intentionalBurn: '0',
+ intentionalBurnAtoms: 0n,
isInvalid: false,
tokenId: childTokenId,
tokenType: SLP_TOKEN_TYPE_NFT1_CHILD,
diff --git a/modules/ecash-lib/tests/txBuilder.test.ts b/modules/ecash-lib/tests/txBuilder.test.ts
--- a/modules/ecash-lib/tests/txBuilder.test.ts
+++ b/modules/ecash-lib/tests/txBuilder.test.ts
@@ -36,7 +36,7 @@
import '../src/initNodeJs.js';
const NUM_COINS = 500;
-const COIN_VALUE = 100000;
+const COIN_VALUE = 100000n;
const SIG_HASH_TYPES = [
ALL_BIP143,
@@ -92,8 +92,8 @@
);
// Send some UTXOs to the wallet
- await runner.sendToScript(90000, p2pkh);
- await runner.sendToScript(90000, p2pkh);
+ await runner.sendToScript(90000n, p2pkh);
+ await runner.sendToScript(90000n, p2pkh);
const utxos = await chronik.script('p2pkh', toHex(pkh)).utxos();
expect(utxos.utxos.length).to.equal(2);
@@ -104,7 +104,7 @@
input: {
prevOut: utxo.outpoint,
signData: {
- value: utxo.value,
+ sats: utxo.sats,
outputScript: p2pkh,
},
},
@@ -112,22 +112,22 @@
})),
outputs: [
// Recipient using a TxOutput
- { value: 120000, script: recipientScript },
+ { sats: 120000n, script: recipientScript },
// Recipient using a TxOutputAddress (p2pkh)
{
- value: 10000,
+ sats: 10000n,
script: Script.fromAddress(otherRecipientAddressP2pkh),
},
// Recipient using a TxOutputAddress (p2sh)
{
- value: 10000,
+ sats: 10000n,
script: Script.fromAddress(otherRecipientAddressP2sh),
},
// Leftover change back to wallet
p2pkh,
],
});
- const spendTx = txBuild.sign({ feePerKb: 1000, dustLimit: 546 });
+ const spendTx = txBuild.sign({ feePerKb: 1000, dustLimit: 546n });
const txid = (await chronik.broadcastTx(spendTx.ser())).txid;
// Now have 1 UTXO change in the wallet
@@ -140,7 +140,9 @@
},
blockHeight: -1,
isCoinbase: false,
- value: 90000 * 2 - 120000 - 10000 - 10000 - spendTx.serSize(),
+ sats: BigInt(
+ 90000 * 2 - 120000 - 10000 - 10000 - spendTx.serSize(),
+ ),
isFinal: false,
},
]);
@@ -155,7 +157,7 @@
const p2pkh = Script.p2pkh(pkh);
for (const sigHashType of SIG_HASH_TYPES) {
- const txid = await runner.sendToScript(90000, p2pkh);
+ const txid = await runner.sendToScript(90000n, p2pkh);
const txBuild = new TxBuilder({
inputs: [
{
@@ -165,7 +167,7 @@
outIdx: 0,
},
signData: {
- value: 90000,
+ sats: 90000n,
outputScript: p2pkh,
},
},
@@ -174,7 +176,7 @@
],
outputs: [p2pkh],
});
- const spendTx = txBuild.sign({ feePerKb: 1000, dustLimit: 546 });
+ const spendTx = txBuild.sign({ feePerKb: 1000, dustLimit: 546n });
await chronik.broadcastTx(spendTx.ser());
}
});
@@ -187,7 +189,7 @@
const p2pk = Script.fromOps([pushBytesOp(pk), OP_CHECKSIG]);
for (const sigHashType of SIG_HASH_TYPES) {
- const txid = await runner.sendToScript(90000, p2pk);
+ const txid = await runner.sendToScript(90000n, p2pk);
const txBuild = new TxBuilder({
inputs: [
{
@@ -197,7 +199,7 @@
outIdx: 0,
},
signData: {
- value: 90000,
+ sats: 90000n,
outputScript: p2pk,
},
},
@@ -206,7 +208,7 @@
],
outputs: [p2pk],
});
- const spendTx = txBuild.sign({ feePerKb: 1000, dustLimit: 546 });
+ const spendTx = txBuild.sign({ feePerKb: 1000, dustLimit: 546n });
await chronik.broadcastTx(spendTx.ser());
}
});
@@ -219,7 +221,7 @@
const p2pk = Script.fromOps([pushBytesOp(pk), OP_CHECKSIG]);
for (const sigHashType of SIG_HASH_TYPES) {
- const txid = await runner.sendToScript(90000, p2pk);
+ const txid = await runner.sendToScript(90000n, p2pk);
const txBuild = new TxBuilder({
inputs: [
{
@@ -230,7 +232,7 @@
},
sequence: 0x92345678,
signData: {
- value: 90000,
+ sats: 90000n,
outputScript: p2pk,
},
},
@@ -250,7 +252,7 @@
],
outputs: [p2pk],
});
- const spendTx = txBuild.sign({ feePerKb: 1000, dustLimit: 546 });
+ const spendTx = txBuild.sign({ feePerKb: 1000, dustLimit: 546n });
await chronik.broadcastTx(spendTx.ser());
}
});
@@ -268,7 +270,7 @@
const p2sh = Script.p2sh(shaRmd160(redeemScript.bytecode));
for (const sigHashType of SIG_HASH_TYPES) {
- const txid = await runner.sendToScript(90000, p2sh);
+ const txid = await runner.sendToScript(90000n, p2sh);
const txBuild = new TxBuilder({
inputs: [
{
@@ -278,7 +280,7 @@
outIdx: 0,
},
signData: {
- value: 90000,
+ sats: 90000n,
redeemScript,
},
},
@@ -302,7 +304,7 @@
],
outputs: [p2sh],
});
- const spendTx = txBuild.sign({ feePerKb: 1000, dustLimit: 546 });
+ const spendTx = txBuild.sign({ feePerKb: 1000, dustLimit: 546n });
await chronik.broadcastTx(spendTx.ser());
}
});
@@ -330,7 +332,7 @@
const p2sh = Script.p2sh(shaRmd160(redeemScript.bytecode));
for (const sigHashType of SIG_HASH_TYPES) {
- const txid = await runner.sendToScript(90000, p2sh);
+ const txid = await runner.sendToScript(90000n, p2sh);
const txBuild = new TxBuilder({
inputs: [
{
@@ -341,7 +343,7 @@
},
sequence: 0x98765432,
signData: {
- value: 90000,
+ sats: 90000n,
redeemScript,
},
},
@@ -375,7 +377,7 @@
],
outputs: [p2sh],
});
- const spendTx = txBuild.sign({ feePerKb: 1000, dustLimit: 546 });
+ const spendTx = txBuild.sign({ feePerKb: 1000, dustLimit: 546n });
await chronik.broadcastTx(spendTx.ser());
}
});
@@ -392,7 +394,7 @@
OP_CHECKSIG,
]);
const p2sh = Script.p2sh(shaRmd160(redeemScript.bytecode));
- const txid = await runner.sendToScript(90000, p2sh);
+ const txid = await runner.sendToScript(90000n, p2sh);
const txBuild = new TxBuilder({
inputs: [
{
@@ -402,7 +404,7 @@
outIdx: 0,
},
signData: {
- value: 90000,
+ sats: 90000n,
redeemScript,
},
},
@@ -428,77 +430,77 @@
],
outputs: [
{
- value: 20000,
+ sats: 20000n,
script: Script.p2pkh(shaRmd160(pk1)),
},
Script.p2pkh(shaRmd160(pk2)),
{
- value: 30000,
+ sats: 30000n,
script: Script.p2pkh(shaRmd160(pk2)),
},
],
});
// 0sats/kB (not broadcast)
- let spendTx = txBuild.sign({ feePerKb: 0, dustLimit: 546 });
- expect(spendTx.outputs[1].value).to.equal(40000n);
+ let spendTx = txBuild.sign({ feePerKb: 0, dustLimit: 546n });
+ expect(spendTx.outputs[1].sats).to.equal(40000n);
// 1ksats/kB
- spendTx = txBuild.sign({ feePerKb: 1000, dustLimit: 546 });
+ spendTx = txBuild.sign({ feePerKb: 1000, dustLimit: 546n });
await chronik.broadcastTx(spendTx.ser());
- expect(spendTx.outputs[1].value).to.equal(
+ expect(spendTx.outputs[1].sats).to.equal(
BigInt(40000 - spendTx.serSize()),
);
// 10ksats/kB
txBuild.inputs[0].input.prevOut.txid = await runner.sendToScript(
- 90000,
+ 90000n,
p2sh,
);
- spendTx = txBuild.sign({ feePerKb: 10000, dustLimit: 546 });
+ spendTx = txBuild.sign({ feePerKb: 10000, dustLimit: 546n });
await chronik.broadcastTx(spendTx.ser());
- expect(spendTx.outputs[1].value).to.equal(
+ expect(spendTx.outputs[1].sats).to.equal(
BigInt(40000 - 10 * spendTx.serSize()),
);
// 100ksats/kB
txBuild.inputs[0].input.prevOut.txid = await runner.sendToScript(
- 90000,
+ 90000n,
p2sh,
);
- spendTx = txBuild.sign({ feePerKb: 100000, dustLimit: 546 });
+ spendTx = txBuild.sign({ feePerKb: 100000, dustLimit: 546n });
await chronik.broadcastTx(spendTx.ser());
- expect(spendTx.outputs[1].value).to.equal(
+ expect(spendTx.outputs[1].sats).to.equal(
BigInt(40000 - 100 * spendTx.serSize()),
);
// 120ksats/kB, deletes leftover output
txBuild.inputs[0].input.prevOut.txid = await runner.sendToScript(
- 90000,
+ 90000n,
p2sh,
);
- spendTx = txBuild.sign({ feePerKb: 120000, dustLimit: 546 });
+ spendTx = txBuild.sign({ feePerKb: 120000, dustLimit: 546n });
await chronik.broadcastTx(spendTx.ser());
expect(spendTx.outputs.length).to.equal(2);
// 100ksats/kB with a 5000 dust limit deletes leftover too
txBuild.inputs[0].input.prevOut.txid = await runner.sendToScript(
- 90000,
+ 90000n,
p2sh,
);
- spendTx = txBuild.sign({ feePerKb: 100000, dustLimit: 5000 });
+ spendTx = txBuild.sign({ feePerKb: 100000, dustLimit: 5000n });
await chronik.broadcastTx(spendTx.ser());
expect(spendTx.outputs.length).to.equal(2);
// 1000ksats/kB does't have sufficient sats even without leftover
txBuild.inputs[0].input.prevOut.txid = await runner.sendToScript(
- 90000,
+ 90000n,
p2sh,
);
expect(() =>
- txBuild.sign({ feePerKb: 1000000, dustLimit: 546 }),
+ txBuild.sign({ feePerKb: 1000000, dustLimit: 546n }),
).to.throw(
- `Insufficient input value (90000): Can only pay for 40000 fees, ` +
+ `Insufficient input sats (90000): Can only pay for 40000 fees, ` +
`but ${spendTx.serSize() * 1000} required`,
);
});
@@ -520,7 +522,7 @@
OP_CHECKSIG,
]);
const p2sh = Script.p2sh(shaRmd160(redeemScript.bytecode));
- const txid = await runner.sendToScript(90000, p2sh);
+ const txid = await runner.sendToScript(90000n, p2sh);
const txBuild = new TxBuilder({
inputs: [
{
@@ -530,7 +532,7 @@
outIdx: 0,
},
signData: {
- value: 90000,
+ sats: 90000n,
redeemScript,
},
},
@@ -556,11 +558,11 @@
],
outputs: [
{
- value: 20000,
+ sats: 20000n,
script: Script.p2pkh(shaRmd160(pk1)),
},
{
- value: 30000,
+ sats: 30000n,
script: Script.p2pkh(shaRmd160(pk2)),
},
// Leftover (change) output is specified as Script
@@ -569,13 +571,13 @@
});
// 0sats/kB (not broadcast)
- let spendTx = txBuild.sign({ feePerKb: 0, dustLimit: 546 });
- expect(spendTx.outputs[2].value).to.equal(40000n);
+ let spendTx = txBuild.sign({ feePerKb: 0, dustLimit: 546n });
+ expect(spendTx.outputs[2].sats).to.equal(40000n);
// 1ksats/kB
- spendTx = txBuild.sign({ feePerKb: 1000, dustLimit: 546 });
+ spendTx = txBuild.sign({ feePerKb: 1000, dustLimit: 546n });
await chronik.broadcastTx(spendTx.ser());
- expect(spendTx.outputs[2].value).to.equal(
+ expect(spendTx.outputs[2].sats).to.equal(
BigInt(40000 - spendTx.serSize()),
);
});
@@ -592,11 +594,11 @@
txBuild.inputs.push({
input: {
prevOut: {
- txid: await runner.sendToScript(90000, p2pkh),
+ txid: await runner.sendToScript(90000n, p2pkh),
outIdx: 0,
},
signData: {
- value: 90000,
+ sats: 90000n,
outputScript: p2pkh,
},
},
@@ -606,17 +608,17 @@
txBuild.outputs.push(Script.p2pkh(shaRmd160(pk2)));
const txSize = 8896;
const extraOutput = {
- value: 90000 * 2 - (txSize + 252 * 546),
+ sats: BigInt(90000 * 2 - (txSize + 252 * 546)),
script: p2pkh,
};
txBuild.outputs.push(extraOutput);
for (let i = 0; i < 251; ++i) {
- txBuild.outputs.push({ value: 546, script: p2pkh });
+ txBuild.outputs.push({ sats: 546n, script: p2pkh });
}
expect(txBuild.outputs.length).to.equal(253);
- let spendTx = txBuild.sign({ feePerKb: 1000, dustLimit: 546 });
+ let spendTx = txBuild.sign({ feePerKb: 1000, dustLimit: 546n });
expect(spendTx.serSize()).to.equal(txSize);
- expect(spendTx.outputs[0].value).to.equal(BigInt(546));
+ expect(spendTx.outputs[0].sats).to.equal(BigInt(546));
// If we remove the leftover output from the tx, we also remove 2 extra
// bytes from the VARSIZE of the output, because 253 requires 3 bytes to
@@ -624,15 +626,17 @@
const p2pkhSize = 8 + 1 + 25;
const smallerSize = txSize - p2pkhSize - 2;
// We can add 2 extra sats for the VARSIZE savings and it's handled fine
- extraOutput.value += 546 + p2pkhSize + 2;
- spendTx = txBuild.sign({ feePerKb: 1000, dustLimit: 546 });
+ extraOutput.sats += 546n + BigInt(p2pkhSize) + 2n;
+ spendTx = txBuild.sign({ feePerKb: 1000, dustLimit: 546n });
expect(spendTx.serSize()).to.equal(smallerSize);
expect(spendTx.outputs.length).to.equal(252);
// Adding 1 extra sat -> fails -> showing that the previous tx was exact
- extraOutput.value += 1;
- expect(() => txBuild.sign({ feePerKb: 1000, dustLimit: 546 })).to.throw(
- `Insufficient input value (180000): Can only pay for ` +
+ extraOutput.sats += 1n;
+ expect(() =>
+ txBuild.sign({ feePerKb: 1000, dustLimit: 546n }),
+ ).to.throw(
+ `Insufficient input sats (180000): Can only pay for ` +
`${smallerSize - 1} fees, but ${smallerSize} required`,
);
});
@@ -656,7 +660,7 @@
outIdx: 0,
},
signData: {
- value: expectedSize,
+ sats: BigInt(expectedSize),
},
},
signatory: (_, input) => {
@@ -672,7 +676,7 @@
// Leftover script, but will be spliced out again
outputs: [new Script()],
});
- const tx = txBuild.sign({ feePerKb: 1000, dustLimit: 9999 });
+ const tx = txBuild.sign({ feePerKb: 1000, dustLimit: 9999n });
expect(tx.serSize()).to.equal(expectedSize);
});
@@ -690,10 +694,12 @@
],
outputs: [new Script()],
});
- expect(() => txBuild.sign({ feePerKb: 1000, dustLimit: 546 })).to.throw(
- 'Using a leftover output requires setting SignData.value for all inputs',
+ expect(() =>
+ txBuild.sign({ feePerKb: 1000, dustLimit: 546n }),
+ ).to.throw(
+ 'Using a leftover output requires setting SignData.sats for all inputs',
);
- txBuild.inputs[0].input.signData = { value: 1234 };
+ txBuild.inputs[0].input.signData = { sats: 1234n };
expect(() => txBuild.sign({ feePerKb: 1000 })).to.throw(
'Using a leftover output requires setting dustLimit',
);
diff --git a/test/functional/chronik_token_burn.py b/test/functional/chronik_token_burn.py
--- a/test/functional/chronik_token_burn.py
+++ b/test/functional/chronik_token_burn.py
@@ -203,7 +203,7 @@
token_id=genesis_alp.txid,
token_type=pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
tx_type=pb.SEND,
- burn_summary="Unexpected burn: Burns 600 base tokens, but intended to burn 500; burned 100 too many",
+ burn_summary="Unexpected burn: Burns 600 atoms, but intended to burn 500; burned 100 too many",
actual_burn_amount="600",
intentional_burn=500,
),
@@ -217,7 +217,7 @@
txs.append(burn_alp)
burn_alp.send(
chronik,
- error=f"400: Tx {burn_alp.txid} failed token checks: Unexpected burn: Burns 600 base tokens, but intended to burn 500; burned 100 too many.",
+ error=f"400: Tx {burn_alp.txid} failed token checks: Unexpected burn: Burns 600 atoms, but intended to burn 500; burned 100 too many.",
)
burn_alp.test(chronik)
@@ -247,7 +247,7 @@
token_id=genesis_alp.txid,
token_type=pb.TokenType(alp=pb.ALP_TOKEN_TYPE_STANDARD),
is_invalid=True,
- burn_summary="Unexpected burn: Burns 400 base tokens",
+ burn_summary="Unexpected burn: Burns 400 atoms",
actual_burn_amount="400",
),
],
@@ -263,7 +263,7 @@
txs.append(bare_burn)
bare_burn.send(
chronik,
- error=f"400: Tx {bare_burn.txid} failed token checks: Unexpected burn: Burns mint baton(s). Unexpected burn: Burns 400 base tokens.",
+ error=f"400: Tx {bare_burn.txid} failed token checks: Unexpected burn: Burns mint baton(s). Unexpected burn: Burns 400 atoms.",
)
bare_burn.test(chronik)

File Metadata

Mime Type
text/plain
Expires
Tue, May 20, 23:03 (4 h, 45 m)
Storage Engine
blob
Storage Format
Raw Data
Storage Handle
5864332
Default Alt Text
D17650.id52583.diff (411 KB)

Event Timeline