Changeset View
Standalone View
src/pow.cpp
// Copyright (c) 2009-2010 Satoshi Nakamoto | // Copyright (c) 2009-2010 Satoshi Nakamoto | ||||
// Copyright (c) 2009-2016 The Bitcoin Core developers | // Copyright (c) 2009-2016 The Bitcoin Core developers | ||||
// Distributed under the MIT software license, see the accompanying | // Distributed under the MIT software license, see the accompanying | ||||
// file COPYING or http://www.opensource.org/licenses/mit-license.php. | // file COPYING or http://www.opensource.org/licenses/mit-license.php. | ||||
#include "pow.h" | #include "pow.h" | ||||
#include "arith_uint256.h" | #include "arith_uint256.h" | ||||
#include "chain.h" | #include "chain.h" | ||||
#include "primitives/block.h" | #include "primitives/block.h" | ||||
#include "uint256.h" | #include "uint256.h" | ||||
unsigned int GetNextWorkRequired(const CBlockIndex *pindexLast, | uint32_t GetNextWorkRequired(const CBlockIndex *pindexPrev, | ||||
const CBlockHeader *pblock, | const CBlockHeader *pblock, | ||||
const Consensus::Params ¶ms) { | const Consensus::Params ¶ms) { | ||||
unsigned int nProofOfWorkLimit = | const uint32_t nProofOfWorkLimit = | ||||
UintToArith256(params.powLimit).GetCompact(); | UintToArith256(params.powLimit).GetCompact(); | ||||
// Genesis block | // Genesis block | ||||
if (pindexLast == NULL) return nProofOfWorkLimit; | if (pindexPrev == nullptr) { | ||||
return nProofOfWorkLimit; | |||||
} | |||||
// Only change once per difficulty adjustment interval | // Only change once per difficulty adjustment interval | ||||
if ((pindexLast->nHeight + 1) % params.DifficultyAdjustmentInterval() != | uint32_t nHeight = pindexPrev->nHeight + 1; | ||||
0) { | if (nHeight % params.DifficultyAdjustmentInterval() == 0) { | ||||
// Go back by what we want to be 14 days worth of blocks | |||||
assert(nHeight >= params.DifficultyAdjustmentInterval()); | |||||
uint32_t nHeightFirst = nHeight - params.DifficultyAdjustmentInterval(); | |||||
const CBlockIndex *pindexFirst = pindexPrev->GetAncestor(nHeightFirst); | |||||
assert(pindexFirst); | |||||
return CalculateNextWorkRequired(pindexPrev, | |||||
pindexFirst->GetBlockTime(), params); | |||||
} | |||||
if (params.fPowAllowMinDifficultyBlocks) { | if (params.fPowAllowMinDifficultyBlocks) { | ||||
dgenr8: Moving this block of code after the % == 0 check has the effect of disabling the testnet 20… | |||||
dgenr8Unsubmitted Not Done Inline ActionsNever mind, that was already the case! dgenr8: Never mind, that was already the case! | |||||
// Special difficulty rule for testnet: | // Special difficulty rule for testnet: | ||||
// If the new block's timestamp is more than 2* 10 minutes then | // If the new block's timestamp is more than 2* 10 minutes then allow | ||||
// allow mining of a min-difficulty block. | // mining of a min-difficulty block. | ||||
if (pblock->GetBlockTime() > | if (pblock->GetBlockTime() > | ||||
pindexLast->GetBlockTime() + params.nPowTargetSpacing * 2) { | pindexPrev->GetBlockTime() + 2 * params.nPowTargetSpacing) { | ||||
return nProofOfWorkLimit; | return nProofOfWorkLimit; | ||||
} | } | ||||
// Return the last non-special-min-difficulty-rules-block | // Return the last non-special-min-difficulty-rules-block | ||||
const CBlockIndex *pindex = pindexLast; | const CBlockIndex *pindex = pindexPrev; | ||||
while (pindex->pprev && | while (pindex->pprev && | ||||
pindex->nHeight % params.DifficultyAdjustmentInterval() != | pindex->nHeight % params.DifficultyAdjustmentInterval() != 0 && | ||||
0 && | pindex->nBits == nProofOfWorkLimit) { | ||||
pindex->nBits == nProofOfWorkLimit) | |||||
pindex = pindex->pprev; | pindex = pindex->pprev; | ||||
} | |||||
return pindex->nBits; | return pindex->nBits; | ||||
} | } | ||||
return pindexLast->nBits; | |||||
// We can't go bellow the minimum, so early bail. | |||||
uint32_t nBits = pindexPrev->nBits; | |||||
if (nBits == nProofOfWorkLimit) { | |||||
return nProofOfWorkLimit; | |||||
} | } | ||||
// Go back by what we want to be 14 days worth of blocks | // If producing the last 6 block took less than 12h, we keep the same | ||||
int nHeightFirst = | // difficulty. | ||||
pindexLast->nHeight - (params.DifficultyAdjustmentInterval() - 1); | const CBlockIndex *pindex6 = pindexPrev->GetAncestor(nHeight - 7); | ||||
assert(nHeightFirst >= 0); | assert(pindex6); | ||||
const CBlockIndex *pindexFirst = pindexLast->GetAncestor(nHeightFirst); | int64_t mtp6blocks = | ||||
assert(pindexFirst); | pindexPrev->GetMedianTimePast() - pindex6->GetMedianTimePast(); | ||||
if (mtp6blocks < 12 * 3600) { | |||||
return nBits; | |||||
} | |||||
// If producing the last 6 block took more than 12h, increase the difficulty | |||||
// target by 1/4 (which reduces the difficulty by 20%). This ensure the | |||||
// chain do not get stuck in case we lose hashrate abruptly. | |||||
arith_uint256 nPow; | |||||
nPow.SetCompact(nBits); | |||||
nPow += (nPow >> 2); | |||||
// Make sure we do not go bellow allowed values. | |||||
const arith_uint256 bnPowLimit = UintToArith256(params.powLimit); | |||||
if (nPow > bnPowLimit) nPow = bnPowLimit; | |||||
return CalculateNextWorkRequired(pindexLast, pindexFirst->GetBlockTime(), | return nPow.GetCompact(); | ||||
params); | |||||
} | } | ||||
unsigned int CalculateNextWorkRequired(const CBlockIndex *pindexLast, | uint32_t CalculateNextWorkRequired(const CBlockIndex *pindexPrev, | ||||
int64_t nFirstBlockTime, | int64_t nFirstBlockTime, | ||||
const Consensus::Params ¶ms) { | const Consensus::Params ¶ms) { | ||||
if (params.fPowNoRetargeting) return pindexLast->nBits; | if (params.fPowNoRetargeting) { | ||||
return pindexPrev->nBits; | |||||
} | |||||
// Limit adjustment step | // Limit adjustment step | ||||
int64_t nActualTimespan = pindexLast->GetBlockTime() - nFirstBlockTime; | int64_t nActualTimespan = pindexPrev->GetBlockTime() - nFirstBlockTime; | ||||
if (nActualTimespan < params.nPowTargetTimespan / 4) | if (nActualTimespan < params.nPowTargetTimespan / 4) { | ||||
nActualTimespan = params.nPowTargetTimespan / 4; | nActualTimespan = params.nPowTargetTimespan / 4; | ||||
if (nActualTimespan > params.nPowTargetTimespan * 4) | } | ||||
if (nActualTimespan > params.nPowTargetTimespan * 4) { | |||||
nActualTimespan = params.nPowTargetTimespan * 4; | nActualTimespan = params.nPowTargetTimespan * 4; | ||||
} | |||||
// Retarget | // Retarget | ||||
const arith_uint256 bnPowLimit = UintToArith256(params.powLimit); | const arith_uint256 bnPowLimit = UintToArith256(params.powLimit); | ||||
arith_uint256 bnNew; | arith_uint256 bnNew; | ||||
bnNew.SetCompact(pindexLast->nBits); | bnNew.SetCompact(pindexPrev->nBits); | ||||
bnNew *= nActualTimespan; | bnNew *= nActualTimespan; | ||||
bnNew /= params.nPowTargetTimespan; | bnNew /= params.nPowTargetTimespan; | ||||
if (bnNew > bnPowLimit) bnNew = bnPowLimit; | if (bnNew > bnPowLimit) bnNew = bnPowLimit; | ||||
return bnNew.GetCompact(); | return bnNew.GetCompact(); | ||||
} | } | ||||
bool CheckProofOfWork(uint256 hash, unsigned int nBits, | bool CheckProofOfWork(uint256 hash, uint32_t nBits, | ||||
const Consensus::Params ¶ms) { | const Consensus::Params ¶ms) { | ||||
bool fNegative; | bool fNegative; | ||||
bool fOverflow; | bool fOverflow; | ||||
arith_uint256 bnTarget; | arith_uint256 bnTarget; | ||||
bnTarget.SetCompact(nBits, &fNegative, &fOverflow); | bnTarget.SetCompact(nBits, &fNegative, &fOverflow); | ||||
// Check range | // Check range | ||||
if (fNegative || bnTarget == 0 || fOverflow || | if (fNegative || bnTarget == 0 || fOverflow || | ||||
bnTarget > UintToArith256(params.powLimit)) | bnTarget > UintToArith256(params.powLimit)) { | ||||
return false; | return false; | ||||
} | |||||
// Check proof of work matches claimed amount | // Check proof of work matches claimed amount | ||||
if (UintToArith256(hash) > bnTarget) return false; | if (UintToArith256(hash) > bnTarget) { | ||||
return false; | |||||
} | |||||
return true; | return true; | ||||
} | } |
Moving this block of code after the % == 0 check has the effect of disabling the testnet 20-minute rule at a difficulty boundary. Was that desired / necessary?