D13358.id38638.diff
diff --git a/web/alias-server/package.json b/web/alias-server/package.json
--- a/web/alias-server/package.json
+++ b/web/alias-server/package.json
@@ -5,7 +5,8 @@
"main": "index.js",
"scripts": {
"test": "mocha",
- "mocks": "node scripts/generateMocks.js"
+ "mocks": "node scripts/generateMocks.js",
+ "live": "node scripts/aliasTestsLive.js"
},
"keywords": [
"ecash",
diff --git a/web/alias-server/scripts/aliasTestsLive.js b/web/alias-server/scripts/aliasTestsLive.js
new file mode 100644
--- /dev/null
+++ b/web/alias-server/scripts/aliasTestsLive.js
@@ -0,0 +1,254 @@
+const assert = require('assert');
+const fs = require('fs');
+const path = require('path');
+const config = require('../config');
+const {
+ getAliasTxs,
+ getValidAliasRegistrations,
+ getAliasStringsFromValidAliases,
+ getUnprocessedValidAliasRegistrations,
+} = require('../alias');
+const { getAllTxHistory, getUnprocessedTxHistory } = require('../chronik');
+const {
+ getTestableUnprocessedTxCounts,
+ prepareMocksForGetUnprocessedTxHistory,
+} = require('../utils');
+
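+// NB: this script hits the live chronik API; run it with "npm run live" (script added to package.json above)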
+async function fetchAliasesAndTestApi(testMocks = true, liveTestCount = 3) {
+ // Directory for mocks. Relative to /scripts, ../test/mocks/generated/
+ const mocksDir = path.join(__dirname, '..', 'test', 'mocks', 'generated');
+
+ // Create directory if it does not exist
+ if (!fs.existsSync(mocksDir)) {
+ fs.mkdirSync(mocksDir);
+ }
+
+ /*
+ Script to test all alias functions with live API
+
+ Since the registered aliases will be constantly changing, the mocks used by the static unit tests
+ in alias-server will not keep up
+
+ This function can alert a dev if new txs in the dataset are causing problems
+
+ 1 - Get all tx history at alias address
+ 2 - Parse all of this history to get valid alias registrations
+ 3 - Split up all of this history into all possible partial tx history sets,
+ i.e. in batches by blockheight
+ 4 - Test all partial tx history processing functions against this
+ 5 - Create an updated mocks file for unit tests
+ */
+
+ // chronik tx history of alias registration address
+ const aliasTxHistory = await getAllTxHistory(
+ config.aliasConstants.registrationHash160,
+ );
+
+ /*
+ Parse aliasTxHistory to get all potential valid aliasRegistration txs
+ These alias txs
+ - Use valid characters
+ - Paid the correct fee for their bytesize
+ - Have the correct OP_RETURN prefix
+
+ However, they are only potentialAliasTxs. To be valid, they must
+ - Have at least one confirmation
+ - Be the tx registering the given alias with the lowest blockheight
+ - If other alias txs conflict in the same block, have the alphabetically first txid
+ */
+ const allPotentialAliasTxs = getAliasTxs(
+ aliasTxHistory,
+ config.aliasConstants,
+ );
+
+ /*
+ All valid alias txs at alias registration address,
+ sorted by blockheight earliest to most recent, and txid
+ alphabetically.
+
+ NB unconfirmed txs have blockheight === 100,000,000
+ */
+
+ /*
+ validAliasObjects, pendingAliasObjects
+
+ validAliasObjects are registered aliases.
+ These will never change unless and until the Phase 2 migration.
+
+ pendingAliasObjects are alias registrations that will become valid once the tx confirms,
+ assuming no other tx with an alphabetically earlier txid comes into the same block
+ */
+
+ const aliasRegistrations = getValidAliasRegistrations(allPotentialAliasTxs);
+ const { validAliasTxs } = aliasRegistrations;
+
+ // Get the blockheight of the most recently registered alias tx
+ const highestRegisteredBlockheight =
+ validAliasTxs[validAliasTxs.length - 1].blockheight;
+ fs.writeFileSync(
+ `${mocksDir}/aliasRegistrations_${highestRegisteredBlockheight}.json`,
+ JSON.stringify(aliasRegistrations, null, 2),
+ 'utf-8',
+ );
+
+ /*
+ Get the counts of unprocessed txs that can be tested, i.e. counts that fall on
+ blockheight boundaries in aliasTxHistory
+ */
+ const testableUnprocessedTxCounts =
+ getTestableUnprocessedTxCounts(aliasTxHistory);
+
+ // Iterate over all unprocessedTxCount cutoff amounts of unprocessedTxs to test with mocks
+ let failedTestCount = 0;
+ if (testMocks) {
+ for (let i = 0; i < testableUnprocessedTxCounts.length; i += 1) {
+ const desiredUnprocessedTxs = testableUnprocessedTxCounts[i];
+ const {
+ processedBlockheight,
+ processedTxCount,
+ optionalMocks,
+ expectedResult,
+ } = prepareMocksForGetUnprocessedTxHistory(
+ desiredUnprocessedTxs,
+ aliasTxHistory,
+ );
+ const result = await getUnprocessedTxHistory(
+ config.aliasConstants.registrationHash160,
+ processedBlockheight,
+ processedTxCount,
+ optionalMocks,
+ );
+
+ try {
+ assert.deepEqual(result, expectedResult);
+ console.log(
+ '\x1b[32m%s\x1b[0m',
+ `✔ Passed getUnprocessedTxHistory() mock with ${desiredUnprocessedTxs} unprocessed txs of ${aliasTxHistory.length} total txs`,
+ );
+ } catch (err) {
+ console.log(
+ '\x1b[31m%s\x1b[0m',
+ `Failed getUnprocessedTxHistory() mock with ${desiredUnprocessedTxs} unprocessed txs of ${aliasTxHistory.length} total txs`,
+ );
+ failedTestCount += 1;
+ }
+ }
+ }
+ // Iterate over liveTestCount random unprocessedTxCounts and test with live API
+
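+ // Roughly shuffle the testable counts (a random comparator is not a uniform shuffle) and take liveTestCount of them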
+ const randomAmountsOfUnprocessedTxs = testableUnprocessedTxCounts
+ .sort(() => Math.random() - Math.random())
+ .slice(0, liveTestCount);
+
+ for (let i = 0; i < randomAmountsOfUnprocessedTxs.length; i += 1) {
+ // Live tests
+ const liveTestUnprocessedTxCount = randomAmountsOfUnprocessedTxs[i];
+ console.log(
+ `Live API test for ${liveTestUnprocessedTxCount} unprocessed txs:`,
+ );
+
+ // Treat aliasTxHistory as the full tx history
+ const liveTestProcessedTxs = aliasTxHistory.slice(
+ liveTestUnprocessedTxCount,
+ );
+ const liveTestProcessedTxCount = liveTestProcessedTxs.length;
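+ // liveTestProcessedTxs is ordered newest to oldest, so index 0 is the most recently processed tx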
+ const liveTestProcessedBlockheight =
+ liveTestProcessedTxs[0].block.height;
+
+ const liveTestUnprocessedTxs = await getUnprocessedTxHistory(
+ config.aliasConstants.registrationHash160,
+ liveTestProcessedBlockheight,
+ liveTestProcessedTxCount,
+ );
+
+ // Parse liveTestProcessedTxs to simulate what would be in your database
+ const thisLiveTestProcessedPotentialAliasTxs = getAliasTxs(
+ liveTestProcessedTxs,
+ config.aliasConstants,
+ );
+ const thisLiveTestProcessedAliasRegistrations =
+ getValidAliasRegistrations(thisLiveTestProcessedPotentialAliasTxs);
+ const theseLiveTestProcessedValidAliasTxs =
+ thisLiveTestProcessedAliasRegistrations.validAliasTxs;
+
+ // Parse this live test's unprocessed txs to get its valid alias registrations
+ const thisLiveTestUnProcessedPotentialAliasTxs = getAliasTxs(
+ liveTestUnprocessedTxs,
+ config.aliasConstants,
+ );
+ // Get registered aliases from theseLiveTestProcessedValidAliasTxs
+ const thisLiveTestRegisteredAliases = getAliasStringsFromValidAliases(
+ theseLiveTestProcessedValidAliasTxs,
+ );
+ // Incrementally determine valid aliases from unprocessed txs
+ const thisLiveTestUnprocessedAliasRegistrations =
+ getUnprocessedValidAliasRegistrations(
+ thisLiveTestRegisteredAliases,
+ thisLiveTestUnProcessedPotentialAliasTxs,
+ );
+
+ // expect liveTestUnprocessedTxs.concat(liveTestProcessedTxs) === aliasTxHistory
+ // NB order of the above is important^
+ // txs of aliasTxHistory will be newest to oldest
+ try {
+ assert.deepEqual(
+ liveTestUnprocessedTxs.concat(liveTestProcessedTxs),
+ aliasTxHistory,
+ );
+ console.log(
+ '\x1b[32m%s\x1b[0m',
+ `✔ Passed live API test of getUnprocessedTxHistory for ${liveTestUnprocessedTxCount} unprocessed txs`,
+ );
+ } catch (err) {
+ console.log(
+ '\x1b[31m%s\x1b[0m',
+ `Failed live API test of getUnprocessedTxHistory for ${liveTestUnprocessedTxCount} unprocessed txs`,
+ );
+ failedTestCount += 1;
+ // Write some constants to a file for review
+ const liveApiFailureSummary = {
+ processedTxCount: liveTestProcessedTxs.length,
+ unprocessedTxCount: liveTestUnprocessedTxs.length,
+ totalTxCount: aliasTxHistory.length,
+ liveTestProcessedTxs,
+ liveTestUnprocessedTxs,
+ aliasTxHistory,
+ };
+ fs.writeFileSync(
+ `${mocksDir}/liveApiTest_getUnprocessedTxHistory_${liveTestUnprocessedTxCount}_unprocessedTxs.json`,
+ JSON.stringify(liveApiFailureSummary, null, 2),
+ 'utf-8',
+ );
+ }
+ // expect theseLiveTestProcessedValidAliasTxs.concat(thisLiveTestUnprocessedAliasRegistrations) === validAliasTxs
+ try {
+ assert.deepEqual(
+ theseLiveTestProcessedValidAliasTxs.concat(
+ thisLiveTestUnprocessedAliasRegistrations,
+ ),
+ validAliasTxs,
+ );
+ console.log(
+ '\x1b[32m%s\x1b[0m',
+ `✔ Passed live API test of getUnprocessedValidAliasRegistrations for ${liveTestUnprocessedTxCount} unprocessed txs`,
+ );
+ } catch (err) {
+ console.log(
+ '\x1b[31m%s\x1b[0m',
+ `Failed live API test of getUnprocessedValidAliasRegistrations for ${liveTestUnprocessedTxCount} unprocessed txs`,
+ );
+ failedTestCount += 1;
+ }
+ }
+ if (failedTestCount > 0) {
+ // Print failed tests
+ console.log('\x1b[31m%s\x1b[0m', `Failed ${failedTestCount} tests`);
+ process.exit(1);
+ }
+
+ // Exit script in success condition
+ process.exit(0);
+}
+
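+// Execute with default args: testMocks = true, liveTestCount = 3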
+fetchAliasesAndTestApi();
diff --git a/web/alias-server/test/chronik.js b/web/alias-server/test/chronik.js
--- a/web/alias-server/test/chronik.js
+++ b/web/alias-server/test/chronik.js
@@ -1,6 +1,10 @@
const assert = require('assert');
const config = require('../config');
const { getUnprocessedTxHistory } = require('../chronik');
+const {
+ getTestableUnprocessedTxCounts,
+ prepareMocksForGetUnprocessedTxHistory,
+} = require('../utils');
const {
allTxHistoryFromChronik,
unconfirmedTxs,
@@ -117,118 +121,29 @@
// Note: txs are processed by blockheight. So, test will only work if you look at batches of txs from different blocks
// There is an operating assumption here that chronik tx history will always return all confirmed txs at a given address
- const blockheightDeltaTxCounts = [];
- for (let i = 0; i < allTxHistoryFromChronik.length - 1; i += 1) {
- // Find the blockheight cutoffs
- const thisTx = allTxHistoryFromChronik[i];
- const nextTx = allTxHistoryFromChronik[i + 1];
- let thisTxBlockheight, nextTxBlockheight;
- thisTxBlockheight =
- typeof thisTx.block === 'undefined'
- ? config.unconfirmedBlockheight
- : thisTx.block.height;
- nextTxBlockheight =
- typeof nextTx.block === 'undefined'
- ? config.unconfirmedBlockheight
- : nextTx.block.height;
-
- // If the next blockheight is different, then an acceptable number
- // of unprocessedTxs to exclude from a given set of chronikTxHistory is (i + 1)
- if (thisTxBlockheight !== nextTxBlockheight) {
- blockheightDeltaTxCounts.push(i + 1);
- }
- }
+ const testableUnprocessedTxCounts = getTestableUnprocessedTxCounts(
+ allTxHistoryFromChronik,
+ );
// Iterate over all block cutoff amounts of unprocessedTxs to test
- for (let i = 0; i < blockheightDeltaTxCounts; i += 1) {
- const desiredUnprocessedTxs = blockheightDeltaTxCounts[i];
+ for (let i = 0; i < testableUnprocessedTxCounts.length; i += 1) {
+ const desiredUnprocessedTxs = testableUnprocessedTxCounts[i];
- // Create an array processedTxs that is equivalent to allTxHistoryFromChronik less the most recent desiredUnprocessedTxs
- // i.e. remove the first desiredUnprocessedTxs entries
- const processedTxs = allTxHistoryFromChronik.slice(
- desiredUnprocessedTxs,
- );
- const processedBlockheight = processedTxs[0].block.height;
- const processedTxCount = processedTxs.length;
- // Create an array unprocessedTxs that is equivalent to the most recent desiredUnprocessedTxs txs in allTxHistoryFromChronik
- // i.e. take only the first desiredUnprocessedTxs entries
- const unprocessedTxs = allTxHistoryFromChronik.slice(
- 0,
+ const {
+ processedBlockheight,
+ processedTxCount,
+ optionalMocks,
+ expectedResult,
+ } = prepareMocksForGetUnprocessedTxHistory(
desiredUnprocessedTxs,
+ allTxHistoryFromChronik,
);
- console.log(`processedTxs.length`, processedTxs.length);
- console.log(`unprocessedTxs.length`, unprocessedTxs.length);
- console.log(
- `processedTxs.length + unprocessedTxs.length`,
- processedTxs.length + unprocessedTxs.length,
- );
- const allTxHistory = allTxHistoryFromChronik;
-
- const numPages = Math.ceil(
- allTxHistory.length / config.txHistoryPageSize,
- );
-
- const txHistoryFirstPageTxs = allTxHistory.slice(
- 0,
- config.txHistoryPageSize,
- );
- const oldestTxOnFirstPage =
- txHistoryFirstPageTxs[txHistoryFirstPageTxs.length - 1];
-
- let alreadyHaveAllPotentiallyUnprocessedTxs = false;
- if (typeof oldestTxOnFirstPage.block === 'undefined') {
- alreadyHaveAllPotentiallyUnprocessedTxs = false;
- } else {
- // If the oldest tx on the first page hasa blockheight that has already been processed
- if (oldestTxOnFirstPage.block.height <= processedBlockheight) {
- // Then you have enough txs with this one call
- alreadyHaveAllPotentiallyUnprocessedTxs = true;
- }
- }
-
- const txHistoryFirstPageResponse = {
- txs: txHistoryFirstPageTxs,
- numPages,
- };
- // Calculate these values as in the function
- let maxTxs, maxUnprocessedTxCount, numPagesToFetch;
- let remainingTxHistoryPageResponses = [];
- if (!alreadyHaveAllPotentiallyUnprocessedTxs) {
- maxTxs = config.txHistoryPageSize * numPages;
- maxUnprocessedTxCount = maxTxs - processedTxCount;
- numPagesToFetch = Math.ceil(
- maxUnprocessedTxCount / config.txHistoryPageSize,
- );
- // Create this mock chronik response as in the function
- for (let i = 1; i < numPagesToFetch; i += 1) {
- // each page will have config.txHistoryPageSize txs
- // txs will be ordered most recent to oldest
- const txs = unprocessedTxs.slice(
- i * config.txHistoryPageSize,
- (i + 1) * config.txHistoryPageSize,
- );
- remainingTxHistoryPageResponses.push({ txs, numPages });
- }
- }
-
- const optionalMocks = {
- txHistoryFirstPageResponse,
- remainingTxHistoryPageResponses,
- };
-
const result = await getUnprocessedTxHistory(
config.aliasConstants.registrationHash160,
processedBlockheight,
processedTxCount,
optionalMocks,
);
- const expectedResult = {
- maxTxs,
- maxUnprocessedTxCount,
- numPagesToFetch,
- alreadyHaveAllPotentiallyUnprocessedTxs,
- unprocessedTxs: unprocessedTxs,
- };
assert.deepEqual(result, expectedResult);
}
diff --git a/web/alias-server/utils.js b/web/alias-server/utils.js
--- a/web/alias-server/utils.js
+++ b/web/alias-server/utils.js
@@ -132,4 +132,116 @@
}
return confirmedTxHistory;
},
+ getTestableUnprocessedTxCounts: function (chronikTxHistory) {
+ /*
+ chronik tx history will always return
+ - All confirmed txs at an address
+ - All unconfirmed txs as seen by the node
+
+ alias-server functions only process confirmed txs for valid aliases
+ Hence, to test unprocessed transaction batches, we need to split up
+ an array of chronikTxHistory by blockheight
+ */
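+ // e.g. for blockheights [unconfirmed, 700005, 700005, 700001] (newest first), this returns [1, 3]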
+ const testableUnprocessedTxCounts = [];
+ for (let i = 0; i < chronikTxHistory.length - 1; i += 1) {
+ // Find the blockheight cutoffs
+ const thisTx = chronikTxHistory[i];
+ const nextTx = chronikTxHistory[i + 1];
+ let thisTxBlockheight, nextTxBlockheight;
+ thisTxBlockheight =
+ typeof thisTx.block === 'undefined'
+ ? config.unconfirmedBlockheight
+ : thisTx.block.height;
+ nextTxBlockheight =
+ typeof nextTx.block === 'undefined'
+ ? config.unconfirmedBlockheight
+ : nextTx.block.height;
+
+ // If the next blockheight is different, then an acceptable number
+ // of unprocessedTxs to exclude from a given set of chronikTxHistory is (i + 1)
+ if (thisTxBlockheight !== nextTxBlockheight) {
+ testableUnprocessedTxCounts.push(i + 1);
+ }
+ }
+ return testableUnprocessedTxCounts;
+ },
+ prepareMocksForGetUnprocessedTxHistory: function (
+ desiredUnprocessedTxs,
+ aliasTxHistory,
+ ) {
+ // Create an array processedTxs that is equivalent to aliasTxHistory less the most recent desiredUnprocessedTxs
+ // i.e. remove the first desiredUnprocessedTxs entries
+ const processedTxs = aliasTxHistory.slice(desiredUnprocessedTxs);
+ const processedBlockheight = processedTxs[0].block.height;
+ const processedTxCount = processedTxs.length;
+ // Create an array unprocessedTxs that is equivalent to the most recent desiredUnprocessedTxs txs in aliasTxHistory
+ // i.e. take only the first desiredUnprocessedTxs entries
+ const unprocessedTxs = aliasTxHistory.slice(0, desiredUnprocessedTxs);
+
+ const allTxHistory = aliasTxHistory;
+
+ const numPages = Math.ceil(
+ allTxHistory.length / config.txHistoryPageSize,
+ );
+
+ const txHistoryFirstPageTxs = allTxHistory.slice(
+ 0,
+ config.txHistoryPageSize,
+ );
+ const oldestTxOnFirstPage =
+ txHistoryFirstPageTxs[txHistoryFirstPageTxs.length - 1];
+
+ let alreadyHaveAllPotentiallyUnprocessedTxs = false;
+ if (typeof oldestTxOnFirstPage.block === 'undefined') {
+ alreadyHaveAllPotentiallyUnprocessedTxs = false;
+ } else {
+ // If the oldest tx on the first page has a blockheight that has already been processed
+ if (oldestTxOnFirstPage.block.height <= processedBlockheight) {
+ // Then you have enough txs with this one call
+ alreadyHaveAllPotentiallyUnprocessedTxs = true;
+ }
+ }
+
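+ // Mock the first page of chronik tx history in the { txs, numPages } shape getUnprocessedTxHistory expects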
+ const txHistoryFirstPageResponse = {
+ txs: txHistoryFirstPageTxs,
+ numPages,
+ };
+ // Calculate these values as in the function
+ let maxTxs, maxUnprocessedTxCount, numPagesToFetch;
+ let remainingTxHistoryPageResponses = [];
+ if (!alreadyHaveAllPotentiallyUnprocessedTxs) {
+ maxTxs = config.txHistoryPageSize * numPages;
+ maxUnprocessedTxCount = maxTxs - processedTxCount;
+ numPagesToFetch = Math.ceil(
+ maxUnprocessedTxCount / config.txHistoryPageSize,
+ );
+ // Create this mock chronik response as in the function
+ for (let i = 1; i < numPagesToFetch; i += 1) {
+ // each page will have config.txHistoryPageSize txs
+ // txs will be ordered most recent to oldest
+ const txs = unprocessedTxs.slice(
+ i * config.txHistoryPageSize,
+ (i + 1) * config.txHistoryPageSize,
+ );
+ remainingTxHistoryPageResponses.push({ txs, numPages });
+ }
+ }
+ const optionalMocks = {
+ txHistoryFirstPageResponse,
+ remainingTxHistoryPageResponses,
+ };
+ const expectedResult = {
+ maxTxs,
+ maxUnprocessedTxCount,
+ numPagesToFetch,
+ alreadyHaveAllPotentiallyUnprocessedTxs,
+ unprocessedTxs: unprocessedTxs,
+ };
+ return {
+ processedBlockheight,
+ processedTxCount,
+ optionalMocks,
+ expectedResult,
+ };
+ },
};
Attached To
D13358: [alias-server] Add live test script for parsing all tx history with the API