It was the Bitcointalk forum that inspired us to create Bitcointalksearch.org - Bitcointalk is an excellent site that should be the default page for anybody dealing in cryptocurrency, since it is a virtual gold-mine of data. However, our experience and user feedback led us to create our site; Bitcointalk's search is slow, and it is difficult to get the results you need, because you need to log in first to find anything useful - furthermore, there are rate limiters for their search functionality.
The aim of our project is to create a faster website that yields more results, without requiring you to create an account or log in - your personal data will therefore never be in jeopardy, since we do not ask for any of it and you do not need to provide it to use our site with all of its capabilities.
We created this website with the sole purpose of letting users search quickly and efficiently in the field of cryptocurrency, so they have access to the latest and most accurate information, thereby assisting the crypto-community at large.
// This is MIDAS (Multi Interval Difficulty Adjustment System), a novel getnextwork algorithm. It responds quickly to
// huge changes in hashing power, is resistant to time warp attacks, and regulates the block rate to keep the block height
// close to the block height expected given the nominal block interval and the elapsed time. How close the
// correspondence between block height and wall clock time is, depends on how stable the hashing power has been. Maybe
// Bitcoin can wait 2 weeks between updates but no altcoin can.
// It is important that none of these intervals (5, 7, 9, 17) have any common divisor; eliminating the existence of
// harmonics is an important part of eliminating the effectiveness of timewarp attacks.

// Compute the average block intervals (in seconds) over the last 5, 7, 9 and
// 17 blocks ending at pindexLast, writing them through the out-pointers.
// Blocks "before genesis" are synthesized at exactly the nominal spacing so
// the averages remain defined on very short chains.
void avgRecentTimestamps(const CBlockIndex* pindexLast, int64_t *avgOf5, int64_t *avgOf7, int64_t *avgOf9, int64_t *avgOf17)
{
    int64_t blocktime;

    *avgOf5 = *avgOf7 = *avgOf9 = *avgOf17 = 0;

    if (pindexLast)
        blocktime = pindexLast->GetBlockTime();
    else
        blocktime = 0;

    for (int blockoffset = 0; blockoffset < 18; blockoffset++)
    {
        int64_t oldblocktime = blocktime;

        // Step to the previous block; pindexLast becomes NULL once we walk
        // past the genesis block (genesis has pprev == NULL).
        if (pindexLast)
            pindexLast = pindexLast->pprev;

        if (pindexLast)
        {
            blocktime = pindexLast->GetBlockTime();
        }
        else
        { // genesis block or previous: synthesize nominal spacing.
          // (The original code dereferenced the NULL pprev of the genesis
          // block here, crashing on chains shorter than 18 blocks.)
            blocktime -= Params().TargetSpacing();
        }

        // for each block, add interval to every running sum that still covers it.
        if (blockoffset < 5) *avgOf5 += (oldblocktime - blocktime);
        if (blockoffset < 7) *avgOf7 += (oldblocktime - blocktime);
        if (blockoffset < 9) *avgOf9 += (oldblocktime - blocktime);
        *avgOf17 += (oldblocktime - blocktime);
    }

    // now we have the sums of the block intervals. Division gets us the averages.
    *avgOf5 /= 5;
    *avgOf7 /= 7;
    *avgOf9 /= 9;
    *avgOf17 /= 17;
}
// MIDAS next-work calculation: regulate difficulty so block height tracks the
// wall-clock schedule implied by the genesis time and nominal spacing, with
// fast "emergency" retargets when recent intervals are far off the goal.
// Returns the compact-encoded target for the next block.
unsigned int GetNextWorkRequired(const CBlockIndex *pindexLast, const CBlockHeader *pblock)
{
    int64_t avgOf5;
    int64_t avgOf7;
    int64_t avgOf9;
    int64_t avgOf17;
    int64_t toofast;
    int64_t tooslow;
    int64_t difficultyfactor = 10000;   // fixed point, scaled by 10000: 10000 means "no change"
    int64_t now;
    int64_t BlockHeightTime;

    int64_t nFastInterval = (Params().TargetSpacing() * 9 ) / 10; // seconds per block desired when far behind schedule
    int64_t nSlowInterval = (Params().TargetSpacing() * 11) / 10; // seconds per block desired when far ahead of schedule
    int64_t nIntervalDesired;

    unsigned int nProofOfWorkLimit = Params().ProofOfWorkLimit().GetCompact();

    if (pindexLast == NULL)
        // Genesis Block
        return nProofOfWorkLimit;

    // Regulate block times so as to remain synchronized in the long run with the actual time. The first step is to
    // calculate what interval we want to use as our regulatory goal. It depends on how far ahead of (or behind)
    // schedule we are. If we're more than an adjustment period ahead or behind, we use the maximum (nSlowInterval) or minimum
    // (nFastInterval) values; otherwise we calculate a weighted average somewhere in between them. The closer we are
    // to being exactly on schedule the closer our selected interval will be to our nominal interval (TargetSpacing).
    //
    // NOTE: nIntervalDesired is computed before the min-difficulty branch below
    // because that branch also reads it; the original code read it there while
    // still uninitialized.
    now = pindexLast->GetBlockTime();
    BlockHeightTime = Params().GenesisBlock().nTime + pindexLast->nHeight * Params().TargetSpacing();

    if (now < BlockHeightTime + Params().AdjustmentInterval() && now > BlockHeightTime)
        // ahead of schedule by less than one interval.
        nIntervalDesired = ((Params().AdjustmentInterval() - (now - BlockHeightTime)) * Params().TargetSpacing() +
                            (now - BlockHeightTime) * nFastInterval) / Params().AdjustmentInterval();
    else if (now + Params().AdjustmentInterval() > BlockHeightTime && now < BlockHeightTime)
        // behind schedule by less than one interval.
        nIntervalDesired = ((Params().AdjustmentInterval() - (BlockHeightTime - now)) * Params().TargetSpacing() +
                            (BlockHeightTime - now) * nSlowInterval) / Params().AdjustmentInterval();
    else if (now < BlockHeightTime)
        // ahead by more than one interval; slow down as much as permitted.
        nIntervalDesired = nSlowInterval;
    else
        // behind by more than an interval; speed up as much as permitted.
        nIntervalDesired = nFastInterval;

    if (Params().AllowMinDifficultyBlocks())
    {
        // Special difficulty rule for testnet: If the new block's timestamp is more than 2* TargetSpacing then allow
        // mining of a min-difficulty block.
        if (pblock->nTime > pindexLast->nTime + Params().TargetSpacing() * 2)
            return nProofOfWorkLimit;
        else
        {
            // Return the last non-special-min-difficulty-rules-block
            // NOTE(review): nIntervalDesired is a time in seconds being used as
            // a height modulus here - confirm the intended period.
            const CBlockIndex* pindex = pindexLast;
            while (pindex->pprev && pindex->nHeight % nIntervalDesired != 0 && pindex->nBits == nProofOfWorkLimit)
                pindex = pindex->pprev;
            return pindex->nBits;
        }
    }

    // find out what average intervals over last 5, 7, 9, and 17 blocks have been.
    avgRecentTimestamps(pindexLast, &avgOf5, &avgOf7, &avgOf9, &avgOf17);

    // check for emergency adjustments. These are to bring the diff up or down FAST when a burst miner or multipool
    // jumps on or off. Once they kick in they can adjust difficulty very rapidly, and they can kick in very rapidly
    // after massive hash power jumps on or off.
    // Important note: This is a self-damping adjustment because 8/5 and 5/8 are closer to 1 than 3/2 and 2/3. Do not
    // screw with the constants in a way that breaks this relationship. Even though self-damping, it will usually
    // overshoot slightly. But normal adjustment will handle damping without getting back to emergency.
    toofast = (nIntervalDesired * 2) / 3;
    tooslow = (nIntervalDesired * 3) / 2;

    // both of these check the shortest interval to quickly stop when overshot. Otherwise first is longer and second shorter.
    if (avgOf5 < toofast && avgOf9 < toofast && avgOf17 < toofast)
    { //emergency adjustment, slow down (longer intervals because shorter blocks)
        LogPrintf("GetNextWorkRequired EMERGENCY RETARGET\n");
        difficultyfactor *= 8;
        difficultyfactor /= 5;
    }
    else if (avgOf5 > tooslow && avgOf7 > tooslow && avgOf9 > tooslow)
    { //emergency adjustment, speed up (shorter intervals because longer blocks)
        LogPrintf("GetNextWorkRequired EMERGENCY RETARGET\n");
        difficultyfactor *= 5;
        difficultyfactor /= 8;
    }
    // If no emergency adjustment, check for normal adjustment.
    else if (((avgOf5 > nIntervalDesired || avgOf7 > nIntervalDesired) && avgOf9 > nIntervalDesired && avgOf17 > nIntervalDesired) ||
             ((avgOf5 < nIntervalDesired || avgOf7 < nIntervalDesired) && avgOf9 < nIntervalDesired && avgOf17 < nIntervalDesired))
    { // At least 3 averages too high or at least 3 too low, including the two longest. This will be executed 3/16 of
      // the time on the basis of random variation, even if the settings are perfect. It regulates one-sixth of the way
      // to the calculated point.
        LogPrintf("GetNextWorkRequired RETARGET\n");
        difficultyfactor *= (6 * nIntervalDesired);
        difficultyfactor /= (avgOf17 + 5 * nIntervalDesired);  // fixed: original had an unbalanced ')'
    }

    // limit to doubling or halving. There are no conditions where this will make a difference unless there is an
    // unsuspected bug in the above code.
    if (difficultyfactor > 20000) difficultyfactor = 20000;
    if (difficultyfactor < 5000) difficultyfactor = 5000;

    uint256 bnNew;
    uint256 bnOld;
    bnOld.SetCompact(pindexLast->nBits);

    if (difficultyfactor == 10000) // no adjustment.
        return(bnOld.GetCompact());

    // Divide before multiplying so a near-limit 256-bit target cannot overflow;
    // the precision lost (at most 1 part in difficultyfactor) is negligible.
    bnNew = bnOld / difficultyfactor;
    bnNew *= 10000;

    if (bnNew > Params().ProofOfWorkLimit())
        bnNew = Params().ProofOfWorkLimit();

    LogPrintf("Actual time %d, Scheduled time for this block height = %d\n", now, BlockHeightTime );
    LogPrintf("Nominal block interval = %d, regulating on interval %d to get back to schedule.\n",
              Params().TargetSpacing(), nIntervalDesired );
    // fixed: original passed a spurious TargetSpacing() argument and had only
    // three %d specifiers for four interval values.
    LogPrintf("Intervals of last 5/7/9/17 blocks = %d / %d / %d / %d.\n",
              avgOf5, avgOf7, avgOf9, avgOf17);
    LogPrintf("Difficulty Before Adjustment: %08x %s\n", pindexLast->nBits, bnOld.ToString());
    LogPrintf("Difficulty After Adjustment: %08x %s\n", bnNew.GetCompact(), bnNew.ToString());

    return bnNew.GetCompact();
}
// NOTE(review): the fragment below sits at FILE scope - the MIDAS
// GetNextWorkRequired above closes on the previous line - so it will not
// compile where it stands. It duplicates the per-interval / testnet logic
// found inside the static GetNextWorkRequired later in this file and looks
// like a merge/paste leftover; it should be removed or folded back into a
// function. Left byte-identical here pending that decision.
// Only change once per interval
if ((pindexLast->nHeight+1) % nInterval != 0)
{
// Special difficulty rule for testnet:
if (fTestNet)
{
// If the new block's timestamp is more than 2* 10 minutes
// then allow mining of a min-difficulty block.
if (pblock->nTime > pindexLast->nTime + nTargetSpacing*2)
return nProofOfWorkLimit;
else
{
// Return the last non-special-min-difficulty-rules-block
const CBlockIndex* pindex = pindexLast;
while (pindex->pprev && pindex->nHeight % nInterval != 0 && pindex->nBits == nProofOfWorkLimit)
pindex = pindex->pprev;
return pindex->nBits;
}
}
// Mid-interval: difficulty carries over unchanged.
return pindexLast->nBits;
}
// Block reward at a given height, plus transaction fees.
// Block 1 carries the premine; otherwise the subsidy starts at 500 coins and
// is cut in half every 840000 blocks until it reaches zero.
int64 static GetBlockValue(int nHeight, int64 nFees)
{
    int64 nSubsidy = 500 * COIN;

    if (nHeight == 1)
    {
        // Premine block.
        nSubsidy = 160000000 * COIN;
    }

    // Subsidy is cut in half every 840000 blocks.
    // Clamp the halving count: shifting a 64-bit value by >= 64 bits is
    // undefined behavior, and past 64 halvings the subsidy is zero anyway.
    int halvings = nHeight / 840000;
    if (halvings >= 64)
        return nFees;
    nSubsidy >>= halvings;

    return nSubsidy + nFees;
}
// Consensus timing parameters for the legacy retarget code below.
static const int64 nTargetTimespan = 10 * 60; // length of one retarget window: 10 minutes
static const int64 nTargetSpacing = 5 * 60; // desired time between blocks: 5 minutes
static const int64 nInterval = nTargetTimespan / nTargetSpacing; // blocks per retarget window (= 2 with these values)
//
// minimum amount of work that could possibly be required nTime after
// minimum work required was nBase
//
unsigned int ComputeMinWork(unsigned int nBase, int64 nTime)
{
// Testnet has min-difficulty blocks
// after nTargetSpacing*2 time between blocks:
if (fTestNet && nTime > nTargetSpacing*2)
return bnProofOfWorkLimit.GetCompact();
CBigNum bnResult;
bnResult.SetCompact(nBase);
while (nTime > 0 && bnResult < bnProofOfWorkLimit)
{
// Maximum 400% adjustment...
bnResult *= 4;
// ... in best-case exactly 4-times-normal target time
nTime -= nTargetTimespan*4;
}
if (bnResult > bnProofOfWorkLimit)
bnResult = bnProofOfWorkLimit;
return bnResult.GetCompact();
}
// Legacy (Bitcoin/Litecoin-style) difficulty retarget: adjust once every
// nInterval blocks, proportionally to how long the previous window actually
// took, clamped to a factor of 4 in either direction.
// NOTE(review): this file also contains a non-static GetNextWorkRequired (the
// MIDAS version above) with the same parameter list; both cannot coexist in
// one translation unit - confirm which is the active consensus rule.
unsigned int static GetNextWorkRequired(const CBlockIndex* pindexLast, const CBlockHeader *pblock)
{
unsigned int nProofOfWorkLimit = bnProofOfWorkLimit.GetCompact();
// Genesis block: nothing to retarget from, use the easiest allowed target.
if (pindexLast == NULL)
return nProofOfWorkLimit;
// Only change once per interval
if ((pindexLast->nHeight+1) % nInterval != 0)
{
// Special difficulty rule for testnet:
if (fTestNet)
{
// If the new block's timestamp is more than 2* 10 minutes
// then allow mining of a min-difficulty block.
if (pblock->nTime > pindexLast->nTime + nTargetSpacing*2)
return nProofOfWorkLimit;
else
{
// Return the last non-special-min-difficulty-rules-block
const CBlockIndex* pindex = pindexLast;
while (pindex->pprev && pindex->nHeight % nInterval != 0 && pindex->nBits == nProofOfWorkLimit)
pindex = pindex->pprev;
return pindex->nBits;
}
}
// Mid-interval on mainnet: difficulty carries over unchanged.
return pindexLast->nBits;
}
// LiteBitcoin: This fixes an issue where a 51% attack can change difficulty at will.
// Go back the full period unless it's the first retarget after genesis. Code courtesy of Art Forz
int blockstogoback = nInterval-1;
if ((pindexLast->nHeight+1) != nInterval)
blockstogoback = nInterval;
// Go back by what we want to be 14 days worth of blocks
// (NOTE(review): "14 days" is wording inherited from Bitcoin; with the
// constants above the window is nTargetTimespan = 10 minutes.)
const CBlockIndex* pindexFirst = pindexLast;
for (int i = 0; pindexFirst && i < blockstogoback; i++)
pindexFirst = pindexFirst->pprev;
assert(pindexFirst);
// Limit adjustment step
// Measure how long the window actually took, then clamp to [1/4, 4] of the
// target so a single retarget can never move difficulty more than 4x.
int64 nActualTimespan = pindexLast->GetBlockTime() - pindexFirst->GetBlockTime();
printf(" nActualTimespan = %"PRI64d" before bounds\n", nActualTimespan);
if (nActualTimespan < nTargetTimespan/4)
nActualTimespan = nTargetTimespan/4;
if (nActualTimespan > nTargetTimespan*4)
nActualTimespan = nTargetTimespan*4;
// Retarget: new target = old target * actual/target (slower blocks => easier
// target, faster blocks => harder), capped at the proof-of-work limit.
CBigNum bnNew;
bnNew.SetCompact(pindexLast->nBits);
bnNew *= nActualTimespan;
bnNew /= nTargetTimespan;
if (bnNew > bnProofOfWorkLimit)
bnNew = bnProofOfWorkLimit;
/// debug print
printf("GetNextWorkRequired RETARGET\n");
printf("nTargetTimespan = %"PRI64d" nActualTimespan = %"PRI64d"\n", nTargetTimespan, nActualTimespan);
printf("Before: %08x %s\n", pindexLast->nBits, CBigNum().SetCompact(pindexLast->nBits).getuint256().ToString().c_str());
printf("After: %08x %s\n", bnNew.GetCompact(), bnNew.getuint256().ToString().c_str());
return bnNew.GetCompact();
}
// Verify that a block hash satisfies the difficulty claimed in nBits.
// Returns true when the hash is at or below the decoded target; logs and
// returns false otherwise.
bool CheckProofOfWork(uint256 hash, unsigned int nBits)
{
    // Decode the compact difficulty encoding into a full 256-bit target.
    CBigNum bnRequired;
    bnRequired.SetCompact(nBits);

    // The target must be positive and no easier than the network limit.
    if (bnRequired <= 0 || bnRequired > bnProofOfWorkLimit)
        return error("CheckProofOfWork() : nBits below minimum work");

    // A valid proof of work hashes at or below the claimed target.
    if (hash > bnRequired.getuint256())
        return error("CheckProofOfWork() : hash doesn't match nBits");

    return true;
}
// Return maximum amount of blocks that other nodes claim to have
int GetNumBlocksOfPeers()
{
    // Take whichever is larger: the median height reported by connected
    // peers, or the height estimated from the hardcoded checkpoints.
    int nPeerMedian = cPeerBlockCounts.median();
    int nCheckpointEstimate = Checkpoints::GetTotalBlocksEstimate();
    return (nPeerMedian > nCheckpointEstimate) ? nPeerMedian : nCheckpointEstimate;
}