diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -174,7 +174,7 @@
 
 - When reviewers approve your Diff, it should be listed as "ready to Land"
 in Phabricator. When you want to commit your diff to the repository, check out
-type my-topic-branch in git, then type `arc land`. You have now succesfully
+type my-topic-branch in git, then type `arc land`. You have now successfully
 committed a change to the Bitcoin ABC repository.
 
 - When reviewing a Diff, apply the changeset on your local by using
diff --git a/arcanist/linter/CheckDocLinter.php b/arcanist/linter/CheckDocLinter.php
--- a/arcanist/linter/CheckDocLinter.php
+++ b/arcanist/linter/CheckDocLinter.php
@@ -58,7 +58,7 @@
 
 protected function parseLinterOutput($path, $err, $stdout, $stderr) {
 /* Split stdout:
- * 0 => Empty (before first 'Args' occurence)
+ * 0 => Empty (before first 'Args' occurrence)
 * 1 => Args used: count
 * 2 => Args documented: count
 * 3 => Args undocumented: count and list
diff --git a/cmake/modules/FindBerkeleyDB.cmake b/cmake/modules/FindBerkeleyDB.cmake
--- a/cmake/modules/FindBerkeleyDB.cmake
+++ b/cmake/modules/FindBerkeleyDB.cmake
@@ -1,4 +1,4 @@
-# Try to find the BerkeleyDB librairies
+# Try to find the BerkeleyDB libraries
 # BDB_FOUND - system has Berkeley DB lib
 # BDB_INCLUDE_DIR - the Berkeley DB include directory
 # BDB_LIBRARY - Library needed to use Berkeley DB
diff --git a/cmake/modules/FindGMP.cmake b/cmake/modules/FindGMP.cmake
--- a/cmake/modules/FindGMP.cmake
+++ b/cmake/modules/FindGMP.cmake
@@ -1,4 +1,4 @@
-# Try to find the GMP librairies
+# Try to find the GMP libraries
 # GMP_FOUND - system has GMP lib
 # GMP_INCLUDE_DIR - the GMP include directory
 # GMP_LIBRARY - Library needed to use GMP
diff --git a/cmake/modules/FindZeroMQ.cmake b/cmake/modules/FindZeroMQ.cmake
--- a/cmake/modules/FindZeroMQ.cmake
+++ b/cmake/modules/FindZeroMQ.cmake
@@ -1,4 +1,4 @@
-# Try to find the ZeroMQ librairies
+# Try to find the ZeroMQ libraries
 # ZMQ_FOUND - system has ZeroMQ lib
 # ZMQ_INCLUDE_DIR - the ZeroMQ include directory
 # ZMQ_LIBRARY - Libraries needed to use ZeroMQ
diff --git a/cmake/platforms/OSX.cmake b/cmake/platforms/OSX.cmake
--- a/cmake/platforms/OSX.cmake
+++ b/cmake/platforms/OSX.cmake
@@ -16,7 +16,7 @@
 # set 1st to dir with the cross compiler's C/C++ headers/libs
 set(CMAKE_FIND_ROOT_PATH "${CMAKE_CURRENT_SOURCE_DIR}/depends/${TOOLCHAIN_PREFIX};${OSX_SDK_PATH}")
 
-# We also may have built dependancies for the native plateform.
+# We also may have built dependencies for the native platform.
 set(CMAKE_PREFIX_PATH "${CMAKE_CURRENT_SOURCE_DIR}/depends/${TOOLCHAIN_PREFIX}/native")
 
 # modify default behavior of FIND_XXX() commands to
diff --git a/cmake/platforms/Win32.cmake b/cmake/platforms/Win32.cmake
--- a/cmake/platforms/Win32.cmake
+++ b/cmake/platforms/Win32.cmake
@@ -12,7 +12,7 @@
 # set 1st to dir with the cross compiler's C/C++ headers/libs
 set(CMAKE_FIND_ROOT_PATH "${CMAKE_CURRENT_SOURCE_DIR}/depends/${TOOLCHAIN_PREFIX};/usr/${TOOLCHAIN_PREFIX}")
 
-# We also may have built dependancies for the native plateform.
+# We also may have built dependencies for the native platform.
 set(CMAKE_PREFIX_PATH "${CMAKE_CURRENT_SOURCE_DIR}/depends/${TOOLCHAIN_PREFIX}/native")
 
 # modify default behavior of FIND_XXX() commands to
diff --git a/cmake/platforms/Win64.cmake b/cmake/platforms/Win64.cmake
--- a/cmake/platforms/Win64.cmake
+++ b/cmake/platforms/Win64.cmake
@@ -12,7 +12,7 @@
 # set 1st to dir with the cross compiler's C/C++ headers/libs
 set(CMAKE_FIND_ROOT_PATH "${CMAKE_CURRENT_SOURCE_DIR}/depends/${TOOLCHAIN_PREFIX};/usr/${TOOLCHAIN_PREFIX}")
 
-# We also may have built dependancies for the native plateform.
+# We also may have built dependencies for the native platform.
 set(CMAKE_PREFIX_PATH "${CMAKE_CURRENT_SOURCE_DIR}/depends/${TOOLCHAIN_PREFIX}/native")
 
 # modify default behavior of FIND_XXX() commands to
diff --git a/contrib/debian/changelog b/contrib/debian/changelog
--- a/contrib/debian/changelog
+++ b/contrib/debian/changelog
@@ -220,7 +220,7 @@
 * Add test_bitcoin to build test
 * Fix clean
- * Remove unnecessary build-dependancies
+ * Remove unnecessary build-dependencies
 
 -- Matt Corallo Wed, 26 Oct 2011 14:37:18 -0400
diff --git a/contrib/debian/examples/bitcoin.conf b/contrib/debian/examples/bitcoin.conf
--- a/contrib/debian/examples/bitcoin.conf
+++ b/contrib/debian/examples/bitcoin.conf
@@ -76,7 +76,7 @@
 #rpcuser=Ulysseys
 #rpcpassword=YourSuperGreatPasswordNumber_DO_NOT_USE_THIS_OR_YOU_WILL_GET_ROBBED_385593
 #
-# The second method `rpcauth` can be added to server startup argument. It is set at intialization time
+# The second method `rpcauth` can be added to server startup argument. It is set at initialization time
 # using the output from the script in share/rpcuser/rpcuser.py after providing a username:
 #
 # ./share/rpcuser/rpcuser.py alice
diff --git a/doc/abc/uahf-technical-spec.md b/doc/abc/uahf-technical-spec.md
--- a/doc/abc/uahf-technical-spec.md
+++ b/doc/abc/uahf-technical-spec.md
@@ -213,7 +213,7 @@
 
 RATIONALE: see Motivation section of BIP143 [2].
 
-NOTE 1: refer to [3] for the specificaton of the revised transaction
+NOTE 1: refer to [3] for the specification of the revised transaction
 digest based on BIP143. Revisions were made to account for non-Segwit
 deployment.
@@ -273,7 +273,7 @@
 
 [2] https://github.com/bitcoin/bips/blob/master/bip-0143.mediawiki#Motivation
 
-[3] [Digest for replay protected signature verification accross hard forks](replay-protected-sighash.md)
+[3] [Digest for replay protected signature verification across hard forks](replay-protected-sighash.md)
 
 [4] https://github.com/Bitcoin-UAHF/spec/blob/master/uahf-test-plan.md
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -159,7 +159,7 @@
 add_subdirectory(univalue)
 
 # Because the Bitcoin ABc source code is disorganised, we
-# end up with a bunch of libraries without any aparent
+# end up with a bunch of libraries without any apparent
 # cohesive structure. This is inherited from Bitcoin Core
 # and reflecting this.
 # TODO: Improve the structure once cmake is rocking.
diff --git a/src/config/CMakeLists.txt b/src/config/CMakeLists.txt
--- a/src/config/CMakeLists.txt
+++ b/src/config/CMakeLists.txt
@@ -7,7 +7,7 @@
 include(CheckSymbolExists)
 include(CheckCXXSourceCompiles)
 
-# Package informations
+# Package information
 set(PACKAGE_NAME "Bitcoin ABC")
 
 # Version
diff --git a/src/core_write.cpp b/src/core_write.cpp
--- a/src/core_write.cpp
+++ b/src/core_write.cpp
@@ -121,7 +121,7 @@
 uint32_t flags = SCRIPT_VERIFY_STRICTENC;
 if (vch.back() & SIGHASH_FORKID) {
 // If the transaction is using SIGHASH_FORKID, we need
- // to set the apropriate flag.
+ // to set the appropriate flag.
 // TODO: Remove after the Hard Fork.
 flags |= SCRIPT_ENABLE_SIGHASH_FORKID;
 }
diff --git a/src/rcu.h b/src/rcu.h
--- a/src/rcu.h
+++ b/src/rcu.h
@@ -107,7 +107,7 @@
 }
 
 /**
- * Construct a new RCUPtr without transfering owership.
+ * Construct a new RCUPtr without transferring ownership.
 */
 static RCUPtr copy(T *ptr) {
 if (ptr != nullptr) {
diff --git a/src/rcu.cpp b/src/rcu.cpp
--- a/src/rcu.cpp
+++ b/src/rcu.cpp
@@ -74,7 +74,7 @@
 * ^
 * Nadded -|
 *
- * After a succesful deletion, threadInfos now points to NChild and the CAS to
+ * After a successful deletion, threadInfos now points to NChild and the CAS to
 * move it to Nadded will fail, causing the insertion process to fail.
 *
 * We also run into problems when several nodes are deleted concurrently.
@@ -181,7 +181,7 @@
 // system. Let's make sure threads that land here proceed one by one.
 // XXX: The best option long term is most likely to use a futex on one of
 // the thread causing synchronization delay so this thread can be waked up
- // at an apropriate time.
+ // at an appropriate time.
 static std::condition_variable cond;
 static Mutex cs;
 WAIT_LOCK(cs, lock);
diff --git a/src/script/interpreter.h b/src/script/interpreter.h
--- a/src/script/interpreter.h
+++ b/src/script/interpreter.h
@@ -66,7 +66,7 @@
 const PrecomputedTransactionData &txdataIn)
 : txTo(txToIn), nIn(nInIn), amount(amountIn), txdata(&txdataIn) {}
 
- // The overriden functions are now final.
+ // The overridden functions are now final.
 bool CheckSig(const std::vector &vchSigIn,
 const std::vector &vchPubKey,
 const CScript &scriptCode,
diff --git a/src/script/interpreter.cpp b/src/script/interpreter.cpp
--- a/src/script/interpreter.cpp
+++ b/src/script/interpreter.cpp
@@ -1122,7 +1122,7 @@
 
 const valtype &data = stacktop(-2);
 
- // Make sure the split point is apropriate.
+ // Make sure the split point is appropriate.
 uint64_t position = CScriptNum(stacktop(-1), fRequireMinimal).getint();
 if (position > data.size()) {
diff --git a/src/seeder/README.md b/src/seeder/README.md
--- a/src/seeder/README.md
+++ b/src/seeder/README.md
@@ -23,7 +23,7 @@
 -----
 
 Assuming you want to run a dns seed on dnsseed.example.com, you will
-need an authorative NS record in example.com's domain record, pointing
+need an authoritative NS record in example.com's domain record, pointing
 to for example vps.example.com:
 
 $ dig -t NS dnsseed.example.com
diff --git a/src/test/data/script_tests.json b/src/test/data/script_tests.json
--- a/src/test/data/script_tests.json
+++ b/src/test/data/script_tests.json
@@ -917,8 +917,8 @@
 ["0x01 0x00", "BIN2NUM 0 EQUAL", "P2SH,STRICTENC", "OK", "BIN2NUM, non-canonical argument "],
 ["0x04 0xffffff7f", "BIN2NUM 2147483647 EQUAL", "P2SH,STRICTENC", "OK", "BIN2NUM, maximum size argument "],
 ["0x04 0xffffffff", "BIN2NUM -2147483647 EQUAL", "P2SH,STRICTENC", "OK", "BIN2NUM, maximum size argument "],
-["0x05 0xffffffff00", "BIN2NUM 2147483647 EQUAL", "P2SH,STRICTENC", "INVALID_NUMBER_RANGE", "BIN2NUM, oversized arguement"],
-["0x05 0xffffff7f80", "BIN2NUM -2147483647 EQUAL", "P2SH,STRICTENC", "OK", "BIN2NUM, non-canonical maximum size arguement"],
+["0x05 0xffffffff00", "BIN2NUM 2147483647 EQUAL", "P2SH,STRICTENC", "INVALID_NUMBER_RANGE", "BIN2NUM, oversized argument"],
+["0x05 0xffffff7f80", "BIN2NUM -2147483647 EQUAL", "P2SH,STRICTENC", "OK", "BIN2NUM, non-canonical maximum size argument"],
 ["0x05 0x0100000000", "BIN2NUM 1 EQUAL", "P2SH,STRICTENC", "OK"],
 ["0x05 0xFE00000000", "BIN2NUM 254 EQUAL", "P2SH,STRICTENC", "OK"],
 ["0x05 0x0500000080", "BIN2NUM 0x01 0x85 EQUAL", "P2SH,STRICTENC", "OK"],
@@ -1083,8 +1083,8 @@
 [
 "0x09 0x300702010102020001 0",
 "0x21 0x038282263212c609d9ea2a6e3e172de238d8c39cabd5ac1ca10646e23fd5f51508 CHECKDATASIG",
- "P2SH,STRICTENC,NULLFAIL",
- "SIG_DER", "Non cannonical DER encoding"
+ "P2SH,STRICTENC,NULLFAIL,CHECKDATASIG",
+ "SIG_DER", "Non canonical DER encoding"
 ],
 
 ["CHECKDATASIGVERIFY"],
@@ -1124,8 +1124,8 @@
 [
 "0x09 0x300702010102020001 0",
 "0x21 0x038282263212c609d9ea2a6e3e172de238d8c39cabd5ac1ca10646e23fd5f51508 CHECKDATASIGVERIFY 1",
- "P2SH,STRICTENC,NULLFAIL",
- "SIG_DER", "Non cannonical DER encoding"
+ "P2SH,STRICTENC,NULLFAIL,CHECKDATASIG",
+ "SIG_DER", "Non canonical DER encoding"
 ],
 
 ["Schnorr flag on: CHECKDATASIG unchanged"],
@@ -1165,8 +1165,8 @@
 [
 "0x09 0x300702010102020001 0",
 "0x21 0x038282263212c609d9ea2a6e3e172de238d8c39cabd5ac1ca10646e23fd5f51508 CHECKDATASIG",
- "P2SH,STRICTENC,NULLFAIL,SCHNORR",
- "SIG_DER", "Non cannonical DER encoding"
+ "P2SH,STRICTENC,NULLFAIL,CHECKDATASIG,SCHNORR",
+ "SIG_DER", "Non canonical DER encoding"
 ],
 
 ["Schnorr flag on: CHECKDATASIGVERIFY unchanged"],
@@ -1206,8 +1206,8 @@
 [
 "0x09 0x300702010102020001 0",
 "0x21 0x038282263212c609d9ea2a6e3e172de238d8c39cabd5ac1ca10646e23fd5f51508 CHECKDATASIGVERIFY 1",
- "P2SH,STRICTENC,NULLFAIL,SCHNORR",
- "SIG_DER", "Non cannonical DER encoding"
+ "P2SH,STRICTENC,NULLFAIL,CHECKDATASIG,SCHNORR",
+ "SIG_DER", "Non canonical DER encoding"
 ],
 
 ["ADD"],
diff --git a/src/test/sighashtype_tests.cpp b/src/test/sighashtype_tests.cpp
--- a/src/test/sighashtype_tests.cpp
+++ b/src/test/sighashtype_tests.cpp
@@ -88,7 +88,7 @@
 BOOST_AUTO_TEST_CASE(sighash_serialization_test) {
 std::set forkValues{0, 1, 0xab1fe9, 0xc81eea, 0xffffff};
 
- // Test all possible sig hash values embeded in signatures.
+ // Test all possible sig hash values embedded in signatures.
 for (uint32_t sigHashType = 0x00; sigHashType <= 0xff; sigHashType++) {
 for (uint32_t forkValue : forkValues) {
 uint32_t rawType = sigHashType | (forkValue << 8);
diff --git a/src/test/util_tests.cpp b/src/test/util_tests.cpp
--- a/src/test/util_tests.cpp
+++ b/src/test/util_tests.cpp
@@ -1280,7 +1280,7 @@
 // lock.
 BOOST_CHECK_EQUAL(LockDirectory(dirname, lockname, true), true);
 
- // Try to acquire the lock in the child process, this should be succesful.
+ // Try to acquire the lock in the child process, this should be successful.
 BOOST_CHECK_EQUAL(write(fd[1], &LockCommand, 1), 1);
 BOOST_CHECK_EQUAL(read(fd[1], &ch, 1), 1);
 BOOST_CHECK_EQUAL((bool)ch, true);
diff --git a/src/validation.cpp b/src/validation.cpp
--- a/src/validation.cpp
+++ b/src/validation.cpp
@@ -2599,7 +2599,7 @@
 arith_uint256 requiredWork = pindexTip->nChainWork;
 switch (pindexTip->nHeight - pindexFork->nHeight) {
 // Limit the penality for depth 1, 2 and 3 to half a block
- // worth of work to ensure we don't fork accidentaly.
+ // worth of work to ensure we don't fork accidentally.
 case 3:
 case 2:
 pindexExtraPow = pindexExtraPow->pprev;
@@ -2974,7 +2974,7 @@
 // In case this was parked, unpark it.
 UnparkBlock(pindex);
 
- // Make sure it is added to the candidate list if apropriate.
+ // Make sure it is added to the candidate list if appropriate.
 if (pindex->IsValid(BlockValidity::TRANSACTIONS) && pindex->nChainTx) {
 setBlockIndexCandidates.insert(pindex);
 PruneBlockIndexCandidates();
@@ -3262,7 +3262,7 @@
 pindex->nChainTx = (pindex->pprev ? pindex->pprev->nChainTx : 0) + pindex->nTx;
 if (pindex->nSequenceId == 0) {
- // We assign a sequence is when transaction are recieved to
+ // We assign a sequence id when transactions are received to
 // prevent a miner from being able to broadcast a block but not
 // its content. However, a sequence id may have been set
 // manually, for instance via PreciousBlock, in which case, we
@@ -3409,7 +3409,7 @@
 * This allows validation of headers on which the PoW hasn't been done.
 * For example: to validate template handed to mining software.
 * Do not call this for any check that depends on the context.
- * For context-dependant calls, see ContextualCheckBlockHeader.
+ * For context-dependent calls, see ContextualCheckBlockHeader.
 */
 static bool CheckBlockHeader(
 const Config &config, const CBlockHeader &block, CValidationState &state,
@@ -3722,7 +3722,7 @@
 /**
 * If the provided block header is valid, add it to the block index.
 *
- * Returns true if the block is succesfully added to the block index.
+ * Returns true if the block is successfully added to the block index.
 */
 bool CChainState::AcceptBlockHeader(const Config &config,
 const CBlockHeader &block,
diff --git a/src/wallet/fees.h b/src/wallet/fees.h
--- a/src/wallet/fees.h
+++ b/src/wallet/fees.h
@@ -19,7 +19,7 @@
 Amount GetMinimumFee(unsigned int nTxBytes, const CTxMemPool &pool);
 
 /**
- * Estimate the minimum fee considering overriden fee rate from coin control
+ * Estimate the minimum fee considering overridden fee rate from coin control
 */
 Amount GetMinimumFee(unsigned int nTxBytes, const CTxMemPool &pool,
 const CCoinControl &coinControl);
diff --git a/src/wallet/wallet.cpp b/src/wallet/wallet.cpp
--- a/src/wallet/wallet.cpp
+++ b/src/wallet/wallet.cpp
@@ -2829,7 +2829,7 @@
 if (nChangePosInOut != -1) {
 tx.vout.insert(tx.vout.begin() + nChangePosInOut,
 tx_new->vout[nChangePosInOut]);
- // We dont have the normal Create/Commit cycle, and dont want to
+ // We don't have the normal Create/Commit cycle, and don't want to
 // risk reusing change, so just remove the key from the keypool
 // here.
 if (!IsDeprecatedRPCEnabled(gArgs, "fundrawtransaction")) {
diff --git a/test/functional/abc-replay-protection.py b/test/functional/abc-replay-protection.py
--- a/test/functional/abc-replay-protection.py
+++ b/test/functional/abc-replay-protection.py
@@ -304,7 +304,7 @@
 update_block(5, replay_txns)
 yield accepted()
 
- # Ok, now we check if a reorg work properly accross the activation.
+ # Ok, now we check if a reorg works properly across the activation.
 postforkblockid = node.getbestblockhash()
 node.invalidateblock(postforkblockid)
 assert(replay_tx0_id in set(node.getrawmempool()))
diff --git a/test/functional/abc-schnorr-activation.py b/test/functional/abc-schnorr-activation.py
--- a/test/functional/abc-schnorr-activation.py
+++ b/test/functional/abc-schnorr-activation.py
@@ -473,7 +473,7 @@
 assert schnorr_tx_id not in set(node.getrawmempool())
 assert ecdsa_tx_id not in set(node.getrawmempool())
 
- # Ok, now we check if a rewind works properly accross the activation.
+ # Ok, now we check if a rewind works properly across the activation.
 # First, rewind the normal post-fork block.
 node.invalidateblock(postforkblock.hash)
 # txes popped back into mempool
diff --git a/test/functional/feature_bip68_sequence.py b/test/functional/feature_bip68_sequence.py
--- a/test/functional/feature_bip68_sequence.py
+++ b/test/functional/feature_bip68_sequence.py
@@ -407,7 +407,7 @@
 for sf in softforks:
 if sf['id'] == 'csv' and sf['version'] == 5:
 return sf['reject']['status']
- raise AssertionError('Cannot find CSV fork activation informations')
+ raise AssertionError('Cannot find CSV fork activation information')
 
 # Make sure that BIP68 isn't being used to validate blocks, prior to
 # versionbits activation. If more blocks are mined prior to this test
diff --git a/test/functional/feature_csv_activation.py b/test/functional/feature_csv_activation.py
--- a/test/functional/feature_csv_activation.py
+++ b/test/functional/feature_csv_activation.py
@@ -107,7 +107,7 @@
 for sf in softforks:
 if sf['id'] == 'csv' and sf['version'] == 5:
 return sf['reject']['status']
- raise AssertionError('Cannot find CSV fork activation informations')
+ raise AssertionError('Cannot find CSV fork activation information')
 
 
 class BIP68_112_113Test(ComparisonTestFramework):
diff --git a/test/functional/test_runner.py b/test/functional/test_runner.py
--- a/test/functional/test_runner.py
+++ b/test/functional/test_runner.py
@@ -153,7 +153,7 @@
 parser.add_argument('--coverage', action='store_true',
 help='generate a basic coverage report for the RPC interface')
 parser.add_argument(
- '--exclude', '-x', help='specify a comma-seperated-list of scripts to exclude. Do not include the .py extension in the name.')
+ '--exclude', '-x', help='specify a comma-separated list of scripts to exclude. Do not include the .py extension in the name.')
 parser.add_argument('--extended', action='store_true',
 help='run the extended test suite in addition to the basic tests')
 parser.add_argument('--cutoff', type=int, default=DEFAULT_EXTENDED_CUTOFF,
@@ -171,7 +171,7 @@
 parser.add_argument('--tmpdirprefix', '-t', default=tempfile.gettempdir(),
 help="Root directory for datadirs")
 parser.add_argument('--junitouput', '-ju',
- default=os.path.join(build_dir, 'junit_results.xml'), help="file that will store JUnit formated test results.")
+ default=os.path.join(build_dir, 'junit_results.xml'), help="file that will store JUnit formatted test results.")
 
 args, unknown_args = parser.parse_known_args()