From 30a565c96c521722878e7c83fd6b25bd0db5d9f7 Mon Sep 17 00:00:00 2001 From: techcoderx Date: Sun, 30 May 2021 11:28:29 +0800 Subject: [PATCH 01/33] custom keys count limit --- src/config.js | 3 +++ src/transactions/newKey.js | 2 ++ 2 files changed, 5 insertions(+) diff --git a/src/config.js b/src/config.js index 8671c7b..5f5c39c 100644 --- a/src/config.js +++ b/src/config.js @@ -138,6 +138,9 @@ var config = { }, 4860000: { hotfix1: true + }, + 15000000: { + maxKeys: 25 } }, read: (blockNum) => { diff --git a/src/transactions/newKey.js b/src/transactions/newKey.js index 560f50a..dc5d193 100644 --- a/src/transactions/newKey.js +++ b/src/transactions/newKey.js @@ -22,6 +22,8 @@ module.exports = { } if (!account.keys) { cb(true); return + } else if (config.maxKeys && account.keys.length >= config.maxKeys) { + cb(false, 'cannot add more than ' + config.maxKeys + ' custom keys') } else { for (let i = 0; i < account.keys.length; i++) if (account.keys[i].id === tx.data.id) { From bb6c6d56e4518cead6518a3a08543dbf83fd242e Mon Sep 17 00:00:00 2001 From: techcoderx Date: Sun, 30 May 2021 11:34:40 +0800 Subject: [PATCH 02/33] disallow voting for inactive leaders --- src/config.js | 3 ++- src/transactions/approveNode.js | 4 +++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/src/config.js b/src/config.js index 5f5c39c..8852cee 100644 --- a/src/config.js +++ b/src/config.js @@ -140,7 +140,8 @@ var config = { hotfix1: true }, 15000000: { - maxKeys: 25 + maxKeys: 25, + disallowVotingInactiveLeader: true } }, read: (blockNum) => { diff --git a/src/transactions/approveNode.js b/src/transactions/approveNode.js index 6ce2e00..f7d93ab 100644 --- a/src/transactions/approveNode.js +++ b/src/transactions/approveNode.js @@ -17,7 +17,9 @@ module.exports = { cache.findOne('accounts', {name: tx.data.target}, function(err, account) { if (!account) cb(false, 'invalid tx target does not exist') - else + else if (config.disallowVotingInactiveLeader && !account.pub_leader) + cb(false, 'target does not have an activated leader signing key') + else cb(true) }) From e8588954bc42d001793baa5230d48819a81c3719 Mon Sep 17 00:00:00 2001 From: techcoderx Date: Sun, 30 May 2021 19:25:13 +0800 Subject: [PATCH 03/33] periodic burn every ecoBlocks --- src/chain.js | 30 +++- src/config.js | 3 +- src/economics.js | 382 +++++++++++++++++++++++------------------------ 3 files changed, 219 insertions(+), 196 deletions(-) diff --git a/src/chain.js b/src/chain.js index e95843a..1dbe4fb 100644 --- a/src/chain.js +++ b/src/chain.js @@ -574,7 +574,7 @@ chain = { }) var blockTimeBefore = new Date().getTime() - series(executions, function(err, results) { + series(executions, async function(err, results) { var string = 'executed' if(revalidate) string = 'validated & '+string logr.debug('Block '+string+' in '+(new Date().getTime()-blockTimeBefore)+'ms') @@ -591,10 +591,14 @@ chain = { burnedInBlock += results[i].burned } + // execute periodic burn + let additionalBurn = await chain.decayBurnAccount(block) + // add rewards for the leader who mined this block chain.leaderRewards(block.miner, block.timestamp, function(dist) { distributedInBlock += dist distributedInBlock = Math.round(distributedInBlock*1000) / 1000 + burnedInBlock += additionalBurn burnedInBlock = Math.round(burnedInBlock*1000) / 1000 cb(executedSuccesfully, distributedInBlock, burnedInBlock) }) @@ -688,6 +692,30 @@ chain = { else cb(0) }) }, + decayBurnAccount: (block) => { + return new Promise((rs) => { + if (!config.burnAccount || block._id % config.ecoBlocks !== 0) + 
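            // A rough sketch of the schedule this guard enforces; numbers are
            // illustrative, not mainnet values. decayBurnAccount() fires once per
            // reward-pool window and burns the window's distributed amount from
            // config.burnAccount to offset inflation:
            //   config.ecoBlocks = 28800        // hypothetical window length
            //   block._id = 15004800            // 15004800 % 28800 === 0 -> burn runs
            //   eco.rewardPool().dist = 1234.56 // distributed over the window
            //   burnAmount = Math.floor(1234.56) === 1234
            // Any other block falls through to the early return below: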
return rs(0) + // offset inflation + let rp = eco.rewardPool() + let burnAmount = Math.floor(rp.dist) + if (burnAmount <= 0) + return rs(0) + cache.findOne('accounts', {name: config.burnAccount}, (e,burnAccount) => { + // do nothing if there is none to burn + if (burnAccount.balance <= 0) + return rs(0) + cache.updateOne('accounts', {name: config.burnAccount}, {$inc: {balance: -burnAmount}},() => + transaction.updateGrowInts(burnAccount, block.timestamp, () => { + transaction.adjustNodeAppr(burnAccount, -burnAmount, () => { + logr.econ('Burned ' + burnAmount + ' periodically from ' + config.burnAccount) + return rs(burnAmount) + }) + }) + ) + }) + }) + }, calculateHashForBlock: (block) => { return chain.calculateHash(block._id, block.phash, block.timestamp, block.txs, block.miner, block.missedBy, block.dist, block.burn) }, diff --git a/src/config.js b/src/config.js index 8852cee..cbab496 100644 --- a/src/config.js +++ b/src/config.js @@ -141,7 +141,8 @@ var config = { }, 15000000: { maxKeys: 25, - disallowVotingInactiveLeader: true + disallowVotingInactiveLeader: true, + burnAccount: 'dtube.airdrop' } }, read: (blockNum) => { diff --git a/src/economics.js b/src/economics.js index d2e4e66..3ea6939 100644 --- a/src/economics.js +++ b/src/economics.js @@ -33,72 +33,70 @@ var eco = { eco.lastRewardPool = eco.startRewardPool eco.startRewardPool = null }, - inflation: (cb) => { - cb(config.rewardPoolMult * config.rewardPoolUsers + config.rewardPoolMin) - return + inflation: () => { + return config.rewardPoolMult * config.rewardPoolUsers + config.rewardPoolMin }, - rewardPool: (cb) => { - eco.inflation(function(theoricalPool){ - var burned = 0 - var distributed = 0 - var votes = 0 - if (!eco.startRewardPool) { - var firstBlockIndex = chain.recentBlocks.length - config.ecoBlocks - if (firstBlockIndex < 0) firstBlockIndex = 0 - var weight = 1 - for (let i = firstBlockIndex; i < chain.recentBlocks.length; i++) { - const block = chain.recentBlocks[i] - if (block.burn) - burned += block.burn - if (block.dist) - distributed += block.dist - - for (let y = 0; y < block.txs.length; y++) { - var tx = block.txs[y] - if (tx.type === TransactionType.VOTE - || tx.type === TransactionType.COMMENT - || tx.type === TransactionType.PROMOTED_COMMENT - || (tx.type === TransactionType.TIPPED_VOTE && config.hotfix1)) - votes += Math.abs(tx.data.vt)*weight - } - weight++ - } - - // weighted average for votes - votes /= (weight+1)/2 - - eco.startRewardPool = { - burn: burned, - dist: distributed, - votes: votes, - theo: theoricalPool, - avail: theoricalPool - distributed + rewardPool: () => { + let theoricalPool = eco.inflation() + let burned = 0 + let distributed = 0 + let votes = 0 + if (!eco.startRewardPool) { + let firstBlockIndex = chain.recentBlocks.length - config.ecoBlocks + if (firstBlockIndex < 0) firstBlockIndex = 0 + let weight = 1 + for (let i = firstBlockIndex; i < chain.recentBlocks.length; i++) { + const block = chain.recentBlocks[i] + if (block.burn) + burned += block.burn + if (block.dist) + distributed += block.dist + + for (let y = 0; y < block.txs.length; y++) { + let tx = block.txs[y] + if (tx.type === TransactionType.VOTE + || tx.type === TransactionType.COMMENT + || tx.type === TransactionType.PROMOTED_COMMENT + || (tx.type === TransactionType.TIPPED_VOTE && config.hotfix1)) + votes += Math.abs(tx.data.vt)*weight } - } else { - burned = eco.startRewardPool.burn - distributed = eco.startRewardPool.dist - votes = eco.startRewardPool.votes + weight++ } - - var avail = theoricalPool - distributed - 
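        // 'avail' below is what the current window can still print: the theoretical
        // pool minus coins already distributed, minus the in-flight distribution of
        // the block being built. Worked example with made-up values:
        //   theoricalPool = 5000, distributed = 3200, eco.currentBlock.dist = 12
        //   avail = 5000 - 3200 - 12 = 1788 (clamped at 0 just after)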
eco.currentBlock.dist - if (avail < 0) avail = 0 - burned += eco.currentBlock.burn - distributed += eco.currentBlock.dist - votes += eco.currentBlock.votes + // weighted average for votes + votes /= (weight+1)/2 - avail = Math.round(avail*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) - burned = Math.round(burned*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) - distributed = Math.round(distributed*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) - votes = Math.round(votes*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) - cb({ - theo: theoricalPool, + eco.startRewardPool = { burn: burned, dist: distributed, votes: votes, - avail: avail - }) - }) + theo: theoricalPool, + avail: theoricalPool - distributed + } + } else { + burned = eco.startRewardPool.burn + distributed = eco.startRewardPool.dist + votes = eco.startRewardPool.votes + } + + + var avail = theoricalPool - distributed - eco.currentBlock.dist + if (avail < 0) avail = 0 + burned += eco.currentBlock.burn + distributed += eco.currentBlock.dist + votes += eco.currentBlock.votes + + avail = Math.round(avail*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) + burned = Math.round(burned*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) + distributed = Math.round(distributed*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) + votes = Math.round(votes*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) + return { + theo: theoricalPool, + burn: burned, + dist: distributed, + votes: votes, + avail: avail + } }, accountPrice: (username) => { var price = config.accountPriceMin @@ -129,161 +127,157 @@ var eco = { winners.push(winner) } - eco.print(currentVote.vt, function(thNewCoins) { - // share the new coins between winners - var newCoins = 0 - for (let i = 0; i < winners.length; i++) { - if (!winners[i].gross) - winners[i].gross = 0 - - var won = thNewCoins * winners[i].share - var rentabilityWinner = eco.rentability(winners[i].ts, currentVote.ts) - won *= rentabilityWinner - won = Math.floor(won*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) - winners[i].gross += won - newCoins += won - delete winners[i].share - - // logr.econ(winners[i].u+' wins '+won+' coins with rentability '+rentabilityWinner) - } - newCoins = Math.round(newCoins*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) + let thNewCoins = eco.print(currentVote.vt) + // share the new coins between winners + var newCoins = 0 + for (let i = 0; i < winners.length; i++) { + if (!winners[i].gross) + winners[i].gross = 0 + + var won = thNewCoins * winners[i].share + var rentabilityWinner = eco.rentability(winners[i].ts, currentVote.ts) + won *= rentabilityWinner + won = Math.floor(won*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) + winners[i].gross += won + newCoins += won + delete winners[i].share - // reconstruct the votes array - var newVotes = [] - for (let i = 0; i < content.votes.length; i++) - if (!content.votes[i].claimed && currentVote.vt*content.votes[i].vt > 0) { - for (let y = 0; y < winners.length; y++) - if (winners[y].u === content.votes[i].u) - newVotes.push(winners[y]) - } else newVotes.push(content.votes[i]) + // logr.econ(winners[i].u+' wins '+won+' coins with rentability '+rentabilityWinner) + } + newCoins = 
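            // Each payout above reduces to, roughly,
            //   won = floor(thNewCoins * share * rentability(vote_ts, claim_ts))
            // floored at ecoClaimPrecision decimals. With illustrative values
            // thNewCoins = 10, share = 0.6, rentability = 0.9, a winner gets 5.4,
            // and newCoins (the sum of all payouts) is rounded once more below: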
Math.round(newCoins*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) - // if there are opposite votes - // burn 50% of the printed DTC in anti-chronological order - var newBurn = 0 - var takeAwayAmount = thNewCoins*config.ecoPunishPercent - var i = content.votes.length - 1 - while (takeAwayAmount !== 0 && i>=0) { - if (i === 0 && !config.ecoPunishAuthor) - break - if (!content.votes[i].claimed && content.votes[i].vt*currentVote.vt < 0) - if (content.votes[i].gross >= takeAwayAmount) { - content.votes[i].gross -= takeAwayAmount - newBurn += takeAwayAmount - takeAwayAmount = 0 - } else { - takeAwayAmount -= content.votes[i].gross - newBurn += content.votes[i].gross - content.votes[i].gross = 0 - } - i-- - } - newBurn = Math.round(newBurn*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) - - logr.econ(newCoins + ' dist from the vote') - logr.econ(newBurn + ' burn from the vote') + // reconstruct the votes array + var newVotes = [] + for (let i = 0; i < content.votes.length; i++) + if (!content.votes[i].claimed && currentVote.vt*content.votes[i].vt > 0) { + for (let y = 0; y < winners.length; y++) + if (winners[y].u === content.votes[i].u) + newVotes.push(winners[y]) + } else newVotes.push(content.votes[i]) - // compute final claimable amount after author tip - let authorVote = -1 - let authorVoteClaimed = false - let totalAuthorTip = 0 - let precisionMulti = Math.pow(10,config.ecoClaimPrecision+config.tippedVotePrecision) - for (let v = 0; v < newVotes.length; v++) - if (newVotes[v].u === content.author) { - authorVote = v - if (newVotes[v].claimed) authorVoteClaimed = true - if (!config.allowRevotes) break + // if there are opposite votes + // burn 50% of the printed DTC in anti-chronological order + var newBurn = 0 + var takeAwayAmount = thNewCoins*config.ecoPunishPercent + var i = content.votes.length - 1 + while (takeAwayAmount !== 0 && i>=0) { + if (i === 0 && !config.ecoPunishAuthor) + break + if (!content.votes[i].claimed && content.votes[i].vt*currentVote.vt < 0) + if (content.votes[i].gross >= takeAwayAmount) { + content.votes[i].gross -= takeAwayAmount + newBurn += takeAwayAmount + takeAwayAmount = 0 + } else { + takeAwayAmount -= content.votes[i].gross + newBurn += content.votes[i].gross + content.votes[i].gross = 0 } - for (let v = 0; v < newVotes.length; v++) - if (authorVote >= 0 && newVotes[v].u !== content.author && newVotes[v].tip) { - if (!authorVoteClaimed) { - let tipAmt = (newVotes[v].gross * Math.pow(10,config.ecoClaimPrecision)) * (newVotes[v].tip * Math.pow(10,config.tippedVotePrecision)) - totalAuthorTip += tipAmt - newVotes[v].totalTip = tipAmt / precisionMulti - newVotes[v].claimable = ((newVotes[v].gross * precisionMulti) - tipAmt) / precisionMulti - } else - newVotes[v].claimable = ((newVotes[v].gross * precisionMulti) - (newVotes[v].totalTip * precisionMulti)) / precisionMulti - } else if (newVotes[v].u !== content.author) - newVotes[v].claimable = newVotes[v].gross - if (authorVote >= 0 && !authorVoteClaimed) - newVotes[authorVote].claimable = ((newVotes[authorVote].gross * precisionMulti) + totalAuthorTip) / precisionMulti + i-- + } + newBurn = Math.round(newBurn*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) + + logr.econ(newCoins + ' dist from the vote') + logr.econ(newBurn + ' burn from the vote') + + // compute final claimable amount after author tip + let authorVote = -1 + let authorVoteClaimed = false + let totalAuthorTip = 0 + let precisionMulti = 
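        // Tip math runs in scaled-integer space to dodge floating-point drift:
        // gross amounts carry ecoClaimPrecision decimals and tip percentages carry
        // tippedVotePrecision decimals, so both are lifted by
        //   precisionMulti = 10^(ecoClaimPrecision + tippedVotePrecision)
        // (e.g. 10^5 if the precisions were 3 and 2; illustrative values), combined
        // as integers, and divided back down by precisionMulti exactly once: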
Math.pow(10,config.ecoClaimPrecision+config.tippedVotePrecision) + for (let v = 0; v < newVotes.length; v++) + if (newVotes[v].u === content.author) { + authorVote = v + if (newVotes[v].claimed) authorVoteClaimed = true + if (!config.allowRevotes) break + } + for (let v = 0; v < newVotes.length; v++) + if (authorVote >= 0 && newVotes[v].u !== content.author && newVotes[v].tip) { + if (!authorVoteClaimed) { + let tipAmt = (newVotes[v].gross * Math.pow(10,config.ecoClaimPrecision)) * (newVotes[v].tip * Math.pow(10,config.tippedVotePrecision)) + totalAuthorTip += tipAmt + newVotes[v].totalTip = tipAmt / precisionMulti + newVotes[v].claimable = ((newVotes[v].gross * precisionMulti) - tipAmt) / precisionMulti + } else + newVotes[v].claimable = ((newVotes[v].gross * precisionMulti) - (newVotes[v].totalTip * precisionMulti)) / precisionMulti + } else if (newVotes[v].u !== content.author) + newVotes[v].claimable = newVotes[v].gross + if (authorVote >= 0 && !authorVoteClaimed) + newVotes[authorVote].claimable = ((newVotes[authorVote].gross * precisionMulti) + totalAuthorTip) / precisionMulti - // add dist/burn/votes to currentBlock eco stats - eco.currentBlock.dist += newCoins - eco.currentBlock.dist = Math.round(eco.currentBlock.dist*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) - eco.currentBlock.burn += newBurn - eco.currentBlock.burn = Math.round(eco.currentBlock.burn*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) - eco.currentBlock.votes += currentVote.vt + // add dist/burn/votes to currentBlock eco stats + eco.currentBlock.dist += newCoins + eco.currentBlock.dist = Math.round(eco.currentBlock.dist*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) + eco.currentBlock.burn += newBurn + eco.currentBlock.burn = Math.round(eco.currentBlock.burn*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) + eco.currentBlock.votes += currentVote.vt - // updating the content - // increase the dist amount for display - // and update the votes array - cache.updateOne('contents', {_id: author+'/'+link}, { - $inc: {dist: newCoins}, - $set: {votes: newVotes} - }, function() { - if (config.masterFee > 0 && newCoins > 0) { - // apply the master fee - var distBefore = content.dist - if (!distBefore) distBefore = 0 - var distAfter = distBefore + newCoins - var benefReward = Math.floor(distAfter/config.masterFee) - Math.floor(distBefore/config.masterFee) - if (benefReward > 0) - cache.updateOne('accounts', {name: config.masterName}, {$inc: {balance: benefReward}}, function() { - cache.insertOne('distributed', { - name: config.masterName, - dist: benefReward, - ts: currentVote.ts, - _id: content.author+'/'+content.link+'/'+currentVote.u+'/'+config.masterName - }, function() { - cache.findOne('accounts', {name: config.masterName}, function(err, masterAccount) { - masterAccount.balance -= benefReward - transaction.updateGrowInts(masterAccount, currentVote.ts, function() { - transaction.adjustNodeAppr(masterAccount, benefReward, function() { - cb(newCoins, benefReward, newBurn) - }) + // updating the content + // increase the dist amount for display + // and update the votes array + cache.updateOne('contents', {_id: author+'/'+link}, { + $inc: {dist: newCoins}, + $set: {votes: newVotes} + }, function() { + if (config.masterFee > 0 && newCoins > 0) { + // apply the master fee + var distBefore = content.dist + if (!distBefore) distBefore = 0 + var distAfter = distBefore + newCoins + var benefReward = 
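            // The master fee pays config.masterName one coin each time the content's
            // lifetime distribution crosses a multiple of masterFee, i.e. roughly a
            // 1/masterFee share overall, so many small claims cannot dodge it.
            // Illustrative numbers, assuming masterFee = 4:
            //   distBefore = 7, newCoins = 6  ->  distAfter = 13
            //   benefReward = floor(13/4) - floor(7/4) = 3 - 1 = 2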
Math.floor(distAfter/config.masterFee) - Math.floor(distBefore/config.masterFee) + if (benefReward > 0) + cache.updateOne('accounts', {name: config.masterName}, {$inc: {balance: benefReward}}, function() { + cache.insertOne('distributed', { + name: config.masterName, + dist: benefReward, + ts: currentVote.ts, + _id: content.author+'/'+content.link+'/'+currentVote.u+'/'+config.masterName + }, function() { + cache.findOne('accounts', {name: config.masterName}, function(err, masterAccount) { + masterAccount.balance -= benefReward + transaction.updateGrowInts(masterAccount, currentVote.ts, function() { + transaction.adjustNodeAppr(masterAccount, benefReward, function() { + cb(newCoins, benefReward, newBurn) }) }) }) }) - else cb(newCoins, 0) - } else cb(newCoins, 0) - }) + }) + else cb(newCoins, 0) + } else cb(newCoins, 0) }) }) }, - print: (vt, cb) => { + print: (vt) => { // loads current reward pool data // and converts VP to DTC based on reward pool stats - eco.rewardPool(function(stats) { - // if reward pool is empty, print nothing - // (can only happen if witnesses freeze distribution in settings) - if (stats.avail === 0) { - cb(0) - return - } + let stats = eco.rewardPool() + // if reward pool is empty, print nothing + // (can only happen if witnesses freeze distribution in settings) + if (stats.avail === 0) + return 0 - var thNewCoins = 0 + var thNewCoins = 0 - // if theres no vote in reward pool stats, we print 1 coin (minimum) - if (stats.votes === 0) - thNewCoins = 1 - // otherwise we proportionally reduce based on recent votes weight - // and how much is available for printing - else - thNewCoins = stats.avail * Math.abs((vt) / stats.votes) + // if theres no vote in reward pool stats, we print 1 coin (minimum) + if (stats.votes === 0) + thNewCoins = 1 + // otherwise we proportionally reduce based on recent votes weight + // and how much is available for printing + else + thNewCoins = stats.avail * Math.abs((vt) / stats.votes) - // rounding down - thNewCoins = Math.floor(thNewCoins*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) - - // and making sure one person cant empty the whole pool when network has been inactive - // e.g. when stats.votes close to 0 - // then vote value will be capped to rewardPoolMaxShare % - if (thNewCoins > Math.floor(stats.avail*config.rewardPoolMaxShare)) - thNewCoins = Math.floor(stats.avail*config.rewardPoolMaxShare) + // rounding down + thNewCoins = Math.floor(thNewCoins*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) + + // and making sure one person cant empty the whole pool when network has been inactive + // e.g. 
when stats.votes close to 0 + // then vote value will be capped to rewardPoolMaxShare % + if (thNewCoins > Math.floor(stats.avail*config.rewardPoolMaxShare)) + thNewCoins = Math.floor(stats.avail*config.rewardPoolMaxShare) - logr.econ('PRINT:'+vt+' VT => '+thNewCoins+' dist', stats.avail) - cb(thNewCoins) - }) + logr.econ('PRINT:'+vt+' VT => '+thNewCoins+' dist', stats.avail) + return thNewCoins }, rentability: (ts1, ts2) => { var ts = ts2 - ts1 From 9bc47da7a7cd79afb733317fafa535da90dbffec Mon Sep 17 00:00:00 2001 From: techcoderx Date: Sun, 30 May 2021 19:59:29 +0800 Subject: [PATCH 04/33] burn up to available balance only --- src/chain.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/chain.js b/src/chain.js index 1dbe4fb..1bc75f8 100644 --- a/src/chain.js +++ b/src/chain.js @@ -705,6 +705,8 @@ chain = { // do nothing if there is none to burn if (burnAccount.balance <= 0) return rs(0) + // burn only up to available balance + burnAmount = Math.min(burnAmount,burnAccount.balance) cache.updateOne('accounts', {name: config.burnAccount}, {$inc: {balance: -burnAmount}},() => transaction.updateGrowInts(burnAccount, block.timestamp, () => { transaction.adjustNodeAppr(burnAccount, -burnAmount, () => { From 265c0d9b31aa2b37eb41abd34f3b886a35c2ce94 Mon Sep 17 00:00:00 2001 From: techcoderx Date: Sun, 30 May 2021 20:48:54 +0800 Subject: [PATCH 05/33] base bw growth and preloaded vp for new paid accounts --- src/config.js | 4 +++- src/transaction.js | 15 ++++++++++++--- src/transactions/newAccount.js | 16 ++++++++++++++-- 3 files changed, 29 insertions(+), 6 deletions(-) diff --git a/src/config.js b/src/config.js index cbab496..6308ade 100644 --- a/src/config.js +++ b/src/config.js @@ -142,7 +142,9 @@ var config = { 15000000: { maxKeys: 25, disallowVotingInactiveLeader: true, - burnAccount: 'dtube.airdrop' + burnAccount: 'dtube.airdrop', + preloadVt: 50, // 50% of vtPerBurn + preloadBwGrowth: 2 // x2 more time of bwGrowth } }, read: (blockNum) => { diff --git a/src/transaction.js b/src/transaction.js index 61c63ee..293c4cb 100644 --- a/src/transaction.js +++ b/src/transaction.js @@ -119,7 +119,10 @@ transaction = { cb(false, 'user has no bandwidth object'); return } - var newBw = new GrowInt(legitUser.bw, {growth:legitUser.balance/(config.bwGrowth), max:config.bwMax}).grow(ts) + var newBw = new GrowInt(legitUser.bw, { + growth: Math.max(legitUser.baseBwGrowth || 0, legitUser.balance)/(config.bwGrowth), + max: config.bwMax + }).grow(ts) if (!newBw) { logr.debug(legitUser) @@ -156,7 +159,10 @@ transaction = { collectGrowInts: (tx, ts, cb) => { cache.findOne('accounts', {name: tx.sender}, function(err, account) { // collect bandwidth - var bandwidth = new GrowInt(account.bw, {growth:account.balance/(config.bwGrowth), max:config.bwMax}) + var bandwidth = new GrowInt(account.bw, { + growth: Math.max(account.baseBwGrowth || 0, account.balance)/(config.bwGrowth), + max: config.bwMax + }) var needed_bytes = JSON.stringify(tx).length var bw = bandwidth.grow(ts) if (!bw) @@ -220,7 +226,10 @@ transaction = { if (!account.bw || !account.vt) logr.debug('error loading grow int', account) - var bw = new GrowInt(account.bw, {growth:account.balance/(config.bwGrowth), max:config.bwMax}).grow(ts) + var bw = new GrowInt(account.bw, { + growth: Math.max(account.baseBwGrowth || 0, account.balance)/(config.bwGrowth), + max: config.bwMax + }).grow(ts) var vt = new GrowInt(account.vt, {growth:account.balance/(config.vtGrowth)}).grow(ts) if (!bw || !vt) { logr.fatal('error growing grow int', account, ts) diff --git 
a/src/transactions/newAccount.js b/src/transactions/newAccount.js index b221779..82be8ad 100644 --- a/src/transactions/newAccount.js +++ b/src/transactions/newAccount.js @@ -37,12 +37,24 @@ module.exports = { }) }, execute: (tx, ts, cb) => { + let newAccBw = {v:0,t:0} + let newAccVt = {v:0,t:0} + let baseBwGrowth = 0 + if (!tx.sender !== config.masterName || config.masterPaysForUsernames) { + if (config.preloadVt) + newAccVt = {v:eco.accountPrice(tx.data.name)*config.vtPerBurn*config.preloadVt/100,t:ts} + if (config.preloadBwGrowth) { + newAccBw = {v:0,t:ts} + baseBwGrowth = Math.floor(eco.accountPrice(tx.data.name)/config.preloadBwGrowth) + } + } cache.insertOne('accounts', { name: tx.data.name.toLowerCase(), pub: tx.data.pub, balance: 0, - bw: {v:0,t:0}, - vt: {v:0,t:0}, + bw: newAccBw, + vt: newAccVt, + baseBwGrowth: baseBwGrowth, follows: [], followers: [], keys: [], From b9476bfb99ba16730815377324c34765b7e9abf0 Mon Sep 17 00:00:00 2001 From: techcoderx Date: Mon, 31 May 2021 20:49:03 +0800 Subject: [PATCH 06/33] multisig setup transactions --- src/cli.js | 50 +++++++++++++++++++++++++++ src/clicmds.js | 17 +++++++++ src/config.js | 3 +- src/transactions/index.js | 10 ++++-- src/transactions/newWeightedKey.js | 24 +++++++++++++ src/transactions/setPasswordWeight.js | 14 ++++++++ src/transactions/setSignThreshold.js | 29 ++++++++++++++++ 7 files changed, 144 insertions(+), 3 deletions(-) create mode 100644 src/transactions/newWeightedKey.js create mode 100644 src/transactions/setPasswordWeight.js create mode 100644 src/transactions/setSignThreshold.js diff --git a/src/cli.js b/src/cli.js index 00e4eaa..9ad5872 100644 --- a/src/cli.js +++ b/src/cli.js @@ -187,6 +187,23 @@ program.command('new-key ') writeLine(' $ new-key finance wyPSnqfmAKoz5gAWyPcND7Rot6es2aFgcDGDTYB89b4q [3] -F key.json -M alice') }) +program.command('new-weighted-key ') + .description('add new key with custom perms and weight') + .action(function(id, pub, allowedTxs, weight) { + verifyAndSendTx('newWeightedKey', id, pub, allowedTxs, weight) + }).on('--help', function(){ + writeLine('') + writeLine('Transaction Types:') + for (const key in TransactionType) + writeLine(' '+TransactionType[key]+': '+key) + writeLine('') + writeLine('WARNING: Multi-signature setup is for advanced users only.') + writeLine('') + writeLine('Examples:') + writeLine(' $ new-key posting tWWLqc5wPTbXPaWrFAfqUwGtEBLmUbyavp3utwPUop2g [4,5,6,7,8] 1 -F key.json -M alice') + writeLine(' $ new-key finance wyPSnqfmAKoz5gAWyPcND7Rot6es2aFgcDGDTYB89b4q [3] 2 -F key.json -M alice') + }) + program.command('password ') .description('change your master key') .action(function(pub) { @@ -203,6 +220,22 @@ program.command('password ') writeLine(' $ change-password tK9DqTygrcwGWZPsyVtZXNpfiZcAZN83nietKbKY8aiH -F key.json -M alice') }) +program.command('password-weight ') + .description('set signature thresholds for transaction types') + .action(function(weight) { + verifyAndSendTx('setPasswordWeight', weight) + }).on('--help', function(){ + writeLine('') + writeLine('Arguments:') + writeLine(' : the new weight of the master key') + writeLine('') + writeLine('WARNING: Multi-signature setup is for advanced users only.') + writeLine('Please choose the weight carefully to prevent being locked out from your account!') + writeLine('') + writeLine('Example:') + writeLine(' $ password-weight 1 -F key.json -M alice') + }) + program.command('profile ') .alias('user-json') .description('modify an account profile') @@ -258,6 +291,23 @@ program.command('remove-key ') 
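// A hedged sketch of the setup flow the three new multisig commands above enable;
// key strings and tx-type numbers below are placeholders, not real values:
//   $ new-weighted-key posting <pubkeyA> [4,5] 1 -F key.json -M alice
//   $ new-weighted-key posting2 <pubkeyB> [4,5] 1 -F key.json -M alice
//   $ signature-threshold '{"default":1,"4":2}' -F key.json -M alice
// After the threshold change, a type-4 transaction from alice needs signatures
// whose combined weight is at least 2, i.e. both weight-1 keys must sign.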
writeLine(' $ remove-key posting -F key.json -M alice') }) +program.command('signature-threshold ') + .alias('sig-threshold') + .description('set signature thresholds for transaction types') + .action(function(thresholds) { + verifyAndSendTx('setSignatureThreshold', thresholds) + }).on('--help', function(){ + writeLine('') + writeLine('Arguments:') + writeLine(' : Stringified json of the list of thresholds for the tx types as well as a default value if any') + writeLine('') + writeLine('WARNING: Multi-signature setup is for advanced users only.') + writeLine(' Please choose the thresholds carefully to prevent being locked out of your account due to insufficient key weight to meet the new signature threshold!') + writeLine('') + writeLine('Example:') + writeLine(' $ set-signature-threshold \'{"default":1,"2":3}\' -F key.json -M alice') + }) + program.command('sign ') .description('sign a tx w/o broadcasting') .action(function(transaction) { diff --git a/src/clicmds.js b/src/clicmds.js index 14a77f4..ac3f201 100644 --- a/src/clicmds.js +++ b/src/clicmds.js @@ -161,6 +161,23 @@ let cmds = { author+'", "vt": '+ parseInt(weight)+', "tag": "'+tag+'", "tip": ' + parseInt(tip) + '}}' return sign(privkey, sender, tx) + }, + + newWeightedKey: (privKey, sender, id, pub, types, weight) => { + let tx = '{"type":20,"data":{"id":"'+ + id+'","pub":"'+ + pub+'","types":'+types+',"weight":'+weight+'}}' + return sign(privKey, sender, tx) + }, + + setSignatureThreshold: (privKey, sender, thresholds) => { + let tx = '{"type":21,"data":{"thresholds":'+thresholds+'}}' + return sign(privKey, sender, tx) + }, + + setPasswordWeight: (privKey, sender, weight) => { + let tx = '{"type":22,"data":{"weight":'+weight+'}}' + return sign(privKey, sender, tx) } } diff --git a/src/config.js b/src/config.js index 6308ade..4025f31 100644 --- a/src/config.js +++ b/src/config.js @@ -144,7 +144,8 @@ var config = { disallowVotingInactiveLeader: true, burnAccount: 'dtube.airdrop', preloadVt: 50, // 50% of vtPerBurn - preloadBwGrowth: 2 // x2 more time of bwGrowth + preloadBwGrowth: 2, // x2 more time of bwGrowth + multisig: true } }, read: (blockNum) => { diff --git a/src/transactions/index.js b/src/transactions/index.js index 6275085..05f76e5 100644 --- a/src/transactions/index.js +++ b/src/transactions/index.js @@ -22,7 +22,10 @@ var transactions = [ require('./limitVt.js'), require('./claimReward.js'), require('./enableNode.js'), - require('./tippedVote.js') + require('./tippedVote.js'), + require('./newWeightedKey.js'), + require('./setSignThreshold.js'), + require('./setPasswordWeight.js') ] module.exports = { @@ -46,7 +49,10 @@ module.exports = { LIMIT_VT: 16, CLAIM_REWARD: 17, ENABLE_NODE: 18, - TIPPED_VOTE: 19 + TIPPED_VOTE: 19, + NEW_WEIGHTED_KEY: 20, + SET_SIG_THRESHOLD: 21, + SET_PASSWORD_WEIGHT: 22 }, validate: (tx, ts, legitUser, cb) => { // logr.debug('tx:'+tx.type+' validation begins') diff --git a/src/transactions/newWeightedKey.js b/src/transactions/newWeightedKey.js new file mode 100644 index 0000000..01e4285 --- /dev/null +++ b/src/transactions/newWeightedKey.js @@ -0,0 +1,24 @@ +module.exports = { + fields: ['id', 'pub', 'types', 'weight'], + validate: (tx, ts, legitUser, cb) => { + if (!config.multisig) + return cb(false, 'multisig is disabled') + + // validate key weight + if (!validate.integer(tx.data.weight,false,false)) + return cb(false, 'invalid tx data.weight must be a positive integer') + + // other validations are the same as NEW_KEY + require('./newKey').validate(tx,ts,legitUser,cb) + }, + execute: 
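    // Validation above delegates to newKey (id/pub/types checks, the maxKeys cap,
    // duplicate-id rejection) and only adds the positive-integer weight check.
    // A stored weighted key would look roughly like (placeholder pub):
    //   { id: 'posting', pub: '<base58 pubkey>', types: [4, 5], weight: 1 }
    // Keys added via plain NEW_KEY have no weight field and count as weight 1
    // during signature verification (see chain.js: keys[i].weight || 1).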
(tx, ts, cb) => { + // same as NEW_KEY + cache.updateOne('accounts', { + name: tx.sender + },{ $push: { + keys: tx.data + }},function(){ + cb(true) + }) + } +} \ No newline at end of file diff --git a/src/transactions/setPasswordWeight.js b/src/transactions/setPasswordWeight.js new file mode 100644 index 0000000..3e990d5 --- /dev/null +++ b/src/transactions/setPasswordWeight.js @@ -0,0 +1,14 @@ +module.exports = { + fields: ['weight'], + validate: (tx, ts, legitUser, cb) => { + if (!config.multisig) + cb(false, 'multisig is disabled') + else if (!validate.integer(tx.data.weight,false,false)) + cb(false, 'invalid tx data.weight must be a positive integer') + else + cb(true) + }, + execute: (tx, ts, cb) => { + cache.updateOne('accounts', {name: tx.sender}, {$set: {pub_weight: tx.data.weight}}, () => cb(true)) + } +} \ No newline at end of file diff --git a/src/transactions/setSignThreshold.js b/src/transactions/setSignThreshold.js new file mode 100644 index 0000000..65ed566 --- /dev/null +++ b/src/transactions/setSignThreshold.js @@ -0,0 +1,29 @@ +module.exports = { + fields: ['thresholds'], + validate: (tx, ts, legitUser, cb) => { + if (!config.multisig) + return cb(false, 'multisig is disabled') + + if (!validate.json(tx.data.thresholds,config.jsonMaxBytes)) + return cb(false, 'invalid tx data.threshold json') + + for (let t in tx.data.thresholds) { + if (t !== 'default' && (t !== parseInt(t).toString() || !validate.integer(parseInt(t),true,false))) + return cb(false, 'invalid tx type ' + t) + + if (!validate.integer(tx.data.thresholds[t],false,false,Number.MAX_SAFE_INTEGER,1)) + return cb(false, 'invalid threshold for tx type ' + t) + } + + cb(true) + }, + execute: (tx, ts, cb) => { + cache.findOne('accounts', {name: tx.sender}, (e,acc) => { + cache.updateOne('accounts', {name: tx.sender}, { + $set: { + thresholds: !acc.thresholds ? 
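                    // Thresholds are merged, never replaced wholesale. Keys are tx
                    // type numbers (as strings) plus an optional 'default' fallback.
                    // Example: existing {default: 1} merged with incoming {"2": 3}
                    // yields {default: 1, "2": 3}; re-sending "2" later would
                    // overwrite only that entry via Object.assign: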
tx.data.thresholds : Object.assign(acc.thresholds,tx.data.thresholds) + } + }, () => cb(true)) + }) + } +} \ No newline at end of file From bd93c9bdf77e26e11370c082d81a60e2f4975383 Mon Sep 17 00:00:00 2001 From: techcoderx Date: Mon, 7 Jun 2021 20:20:21 +0800 Subject: [PATCH 07/33] multisig verify --- src/chain.js | 47 +++++++++++++++++++++++++++++++++++++++------- src/transaction.js | 9 ++++++++- 2 files changed, 48 insertions(+), 8 deletions(-) diff --git a/src/chain.js b/src/chain.js index 1bc75f8..f94e633 100644 --- a/src/chain.js +++ b/src/chain.js @@ -362,27 +362,40 @@ chain = { cb(false); return } // main key can authorize all transactions - var allowedPubKeys = [account.pub] + let allowedPubKeys = [[account.pub, account.pub_weight || 1]] + let threshold = 1 // add all secondary keys having this transaction type as allowed keys if (account.keys && typeof txType === 'number' && Number.isInteger(txType)) for (let i = 0; i < account.keys.length; i++) if (account.keys[i].types.indexOf(txType) > -1) - allowedPubKeys.push(account.keys[i].pub) + allowedPubKeys.push([account.keys[i].pub, account.keys[i].weight || 1]) // if there is no transaction type // it means we are verifying a block signature // so only the leader key is allowed if (txType === null) if (account.pub_leader) - allowedPubKeys = [account.pub_leader] + allowedPubKeys = [[account.pub_leader, 1]] else allowedPubKeys = [] + else { + // compute required signature threshold + if (account.thresholds && account.thresholds[txType]) + threshold = account.thresholds[txType] + else if (account.thresholds && account.thresholds.default) + threshold = account.thresholds.default + } + + // multisig transactions + if (config.multisig && Array.isArray(sign)) + return chain.isValidMultisig(account,threshold,allowedPubKeys,hash,sign,cb) + // single signature for (let i = 0; i < allowedPubKeys.length; i++) { - var bufferHash = Buffer.from(hash, 'hex') - var b58sign = bs58.decode(sign) - var b58pub = bs58.decode(allowedPubKeys[i]) - if (secp256k1.ecdsaVerify(b58sign, bufferHash, b58pub)) { + let bufferHash = Buffer.from(hash, 'hex') + let b58sign = bs58.decode(sign) + let b58pub = bs58.decode(allowedPubKeys[i][0]) + if (secp256k1.ecdsaVerify(b58sign, bufferHash, b58pub) && allowedPubKeys[i[1]] >= threshold) { cb(account) return } @@ -390,6 +403,26 @@ chain = { cb(false) }) }, + isValidMultisig: (account,threshold,allowedPubKeys,hash,signatures,cb) => { + let validWeights = 0 + let validSigs = [] + let hashBuf = Buffer.from(hash, 'hex') + for (let s = 0; s < signatures.length; s++) { + let signBuf = bs58.decode(signatures[s][0]) + let recoveredPub = bs58.encode(secp256k1.ecdsaRecover(signBuf,signatures[s][1],hashBuf)) + if (validSigs.includes(recoveredPub)) + return cb(false, 'duplicate signatures found') + for (let p = 0; p < allowedPubKeys.length; p++) + if (allowedPubKeys[p][0] === recoveredPub) { + validWeights += allowedPubKeys[p][1] + validSigs.push(recoveredPub) + } + } + if (validWeights >= threshold) + cb(account) + else + cb(false, 'insufficient signature weight ' + validWeights + ' to reach threshold of ' + threshold) + }, isValidHashAndSignature: (newBlock, cb) => { // and that the hash is correct var theoreticalHash = chain.calculateHashForBlock(newBlock) diff --git a/src/transaction.js b/src/transaction.js index 293c4cb..3eda56c 100644 --- a/src/transaction.js +++ b/src/transaction.js @@ -83,9 +83,16 @@ transaction = { if (!tx.hash || typeof tx.hash !== 'string') { cb(false, 'invalid tx hash'); return } - if (!tx.signature || 
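        // With config.multisig on, tx.signature may be either the usual base58
        // string or an array of [signature, recid] pairs, e.g. (placeholders):
        //   "signature": [["<b58 sig A>", 0], ["<b58 sig B>", 1]]
        // The recovery id lets secp256k1.ecdsaRecover() rebuild each signer's
        // public key from the tx hash, so isValidMultisig() can sum the weights
        // of the recovered keys against the account's signature threshold: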
typeof tx.signature !== 'string') { + if (!tx.signature || (typeof tx.signature !== 'string' && !(config.multisig && Array.isArray(tx.signature)))) { cb(false, 'invalid tx signature'); return } + // multisig transactions check + // signatures in multisig txs contain an array of signatures and recid + if (config.multisig && Array.isArray(tx.signature)) + for (let s = 0; s < tx.signature.length; s++) + if (!Array.isArray(tx.signature[s]) || tx.signature[s].length !== 2 || typeof tx.signature[s][0] !== 'string' || !Number.isInteger(tx.signature[s][1])) + return cb(false, 'invalid multisig tx signature #'+s) + // enforce transaction limits if (config.txLimits[tx.type] && config.txLimits[tx.type] === 1) { cb(false, 'transaction type is disabled'); return From fd1055a1e45074b7cd648f55db6c1ddbd3def6d0 Mon Sep 17 00:00:00 2001 From: techcoderx Date: Fri, 11 Jun 2021 16:16:01 +0800 Subject: [PATCH 08/33] mini cleanup --- src/config.js | 1 - src/main.js | 2 +- src/p2p.js | 22 ++++++++++------------ 3 files changed, 11 insertions(+), 14 deletions(-) diff --git a/src/config.js b/src/config.js index 4025f31..182cfa8 100644 --- a/src/config.js +++ b/src/config.js @@ -108,7 +108,6 @@ var config = { // precision of author tip percentage // 1 => 10% step, 2 => 1% step, 3 => 0.1% step, 4 => 0.01% step tippedVotePrecision: 2, - tmpForceTs: true, // the time after which transactions expire and wont be accepted by nodes anymore txExpirationTime: 60000, // limit which transactions are available diff --git a/src/main.js b/src/main.js index a52490e..34f6830 100644 --- a/src/main.js +++ b/src/main.js @@ -110,7 +110,7 @@ function startDaemon() { // start the websocket server p2p.init() // and connect to peers - p2p.connect(process.env.PEERS ? process.env.PEERS.split(',') : []) + p2p.connect(process.env.PEERS ? process.env.PEERS.split(',') : [], true) // regularly clean up old txs from mempool setInterval(function() { diff --git a/src/p2p.js b/src/p2p.js index e7d39cf..2554379 100644 --- a/src/p2p.js +++ b/src/p2p.js @@ -40,7 +40,7 @@ var p2p = { setTimeout(function(){p2p.recover()}, replay_interval) if (!process.env.NO_DISCOVERY || process.env.NO_DISCOVERY === '0' || process.env.NO_DISCOVERY === 0) { setInterval(function(){p2p.discoveryWorker()}, discovery_interval) - p2p.discoveryWorker() + p2p.discoveryWorker(true) } setInterval(function(){p2p.cleanRoundConfHistory()}, history_interval) }, @@ -48,7 +48,7 @@ var p2p = { p2p.nodeId = chain.getNewKeyPair() logr.info('P2P ID: '+p2p.nodeId.pub) }, - discoveryWorker: () => { + discoveryWorker: (isInit = false) => { var leaders = chain.generateLeaders(false, config.leaders*3, 0) for (let i = 0; i < leaders.length; i++) { if (p2p.sockets.length >= max_peers) { @@ -76,18 +76,18 @@ var p2p = { } } if (!isConnected) { - logr.info('Trying to connect to '+leaders[i].name+' '+leaders[i].json.node.ws) - p2p.connect([leaders[i].json.node.ws]) + logr[isInit ? 'info' : 'debug']('Trying to connect to '+leaders[i].name+' '+leaders[i].json.node.ws) + p2p.connect([leaders[i].json.node.ws],isInit) } } } }, - connect: (newPeers) => { + connect: (newPeers,isInit = false) => { newPeers.forEach((peer) => { var ws = new WebSocket(peer) ws.on('open', () => p2p.handshake(ws)) ws.on('error', () => { - logr.warn('peer connection failed', peer) + logr[isInit ? 
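            // Failures are logged at 'warn' only for peers passed in at startup or
            // during the first discovery pass (isInit); retries from the periodic
            // discovery worker drop to 'debug' to keep the log quiet: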
'warn' : 'debug']('peer connection failed', peer) }) }) }, @@ -285,12 +285,10 @@ var p2p = { // it should come from one of the elected leaders, so let's verify signature if (p2p.recovering) return if (!message.s || !message.s.s || !message.s.n) return - if (config.tmpForceTs) { - if (!message.d || !message.d.ts || - typeof message.d.ts != 'number' || - message.d.ts + 2*config.blockTime < new Date().getTime() || - message.d.ts - 2*config.blockTime > new Date().getTime()) return - } + if (!message.d || !message.d.ts || + typeof message.d.ts != 'number' || + message.d.ts + 2*config.blockTime < new Date().getTime() || + message.d.ts - 2*config.blockTime > new Date().getTime()) return logr.cons(message.s.n+' U-R'+message.d.r) From fdda00b880f8fbc7efbcfa4b7c7dd9223d9585c1 Mon Sep 17 00:00:00 2001 From: techcoderx Date: Fri, 11 Jun 2021 17:48:31 +0800 Subject: [PATCH 09/33] /tx and /history from extended api --- src/http/history/index.js | 51 ++++++++++++++++++++++----------------- src/http/tx/index.js | 17 +++++++++++++ 2 files changed, 46 insertions(+), 22 deletions(-) create mode 100644 src/http/tx/index.js diff --git a/src/http/history/index.js b/src/http/history/index.js index da9f071..6f2cbfd 100644 --- a/src/http/history/index.js +++ b/src/http/history/index.js @@ -1,34 +1,41 @@ module.exports = { init: (app) => { // account history api - app.get('/history/:author/:lastBlock', (req, res) => { - var lastBlock = parseInt(req.params.lastBlock) - var author = req.params.author - var query = { + app.get('/history/:author/:lastBlock/:skip?', (req, res) => { + let lastBlock = parseInt(req.params.lastBlock) + let skip = parseInt(req.params.skip) + let author = req.params.author + let query = { $and: [ - { - $or: [ - { 'txs.sender': author }, - { 'txs.data.target': author }, - { 'txs.data.receiver': author }, - { 'txs.data.pa': author }, - { 'txs.data.author': author } - ] - } + { $or: [ + {'txs.sender': author}, + {'txs.data.target': author}, + {'txs.data.receiver': author}, + {'txs.data.pa': author}, + {'txs.data.author': author} + ]} ] } - if (lastBlock > 0) - query['$and'].push({ _id: { $lt: lastBlock } }) - - db.collection('blocks').find(query, { sort: { _id: -1 }, limit: 50 }).toArray(function (err, blocks) { + let filter = { + sort: {_id: -1}, + limit: 50 + } + + if (lastBlock > 0) + query['$and'].push({_id: {$lt: lastBlock}}) + + if (!isNaN(skip) && skip > 0) + filter.skip = skip + + db.collection('blocks').find(query, filter).toArray(function(err, blocks) { for (let b = 0; b < blocks.length; b++) { - var newTxs = [] + let newTxs = [] for (let t = 0; t < blocks[b].txs.length; t++) if (blocks[b].txs[t].sender === author - || blocks[b].txs[t].data.target === author - || blocks[b].txs[t].data.receiver === author - || blocks[b].txs[t].data.pa === author - || blocks[b].txs[t].data.author === author) + || blocks[b].txs[t].data.target === author + || blocks[b].txs[t].data.receiver === author + || blocks[b].txs[t].data.pa === author + || blocks[b].txs[t].data.author === author) newTxs.push(blocks[b].txs[t]) blocks[b].txs = newTxs } diff --git a/src/http/tx/index.js b/src/http/tx/index.js new file mode 100644 index 0000000..8ea90c5 --- /dev/null +++ b/src/http/tx/index.js @@ -0,0 +1,17 @@ +module.exports = { + init: (app) => { + // tx lookup by hash + app.get('/tx/:txhash',(req,res) => { + db.collection('blocks').findOne({ "txs.hash": req.params.txhash }, { projection: { txs: { $elemMatch: { hash: req.params.txhash}}}},(error,tx) => { + if (error) + res.status(500).send(error) + else if (tx && 
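            // $elemMatch projects only the matching transaction out of its block,
            // and the block height is attached as includedInBlock. A hit would look
            // roughly like this (field values illustrative):
            //   { "type": 3, "data": { ... }, "sender": "alice",
            //     "ts": 1623400000000, "hash": "<txhash>", "signature": "...",
            //     "includedInBlock": 4860001 }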
tx.txs) { + let result = tx.txs[0] + result.includedInBlock = tx._id + res.send(result) + } else + res.status(404).send({error: 'transaction not found'}) + }) + }) + } +} \ No newline at end of file From f40902ffdb4697f3cab63b28a9a806e2ddbc8231 Mon Sep 17 00:00:00 2001 From: techcoderx Date: Fri, 11 Jun 2021 18:29:01 +0800 Subject: [PATCH 10/33] fix --- src/chain.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chain.js b/src/chain.js index f94e633..ea79294 100644 --- a/src/chain.js +++ b/src/chain.js @@ -395,7 +395,7 @@ chain = { let bufferHash = Buffer.from(hash, 'hex') let b58sign = bs58.decode(sign) let b58pub = bs58.decode(allowedPubKeys[i][0]) - if (secp256k1.ecdsaVerify(b58sign, bufferHash, b58pub) && allowedPubKeys[i[1]] >= threshold) { + if (secp256k1.ecdsaVerify(b58sign, bufferHash, b58pub) && allowedPubKeys[i][1] >= threshold) { cb(account) return } From 3fb33453715664a63dcd3be240d20814eb786773 Mon Sep 17 00:00:00 2001 From: techcoderx Date: Fri, 11 Jun 2021 20:54:14 +0800 Subject: [PATCH 11/33] fix leader warmup --- src/cache.js | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/cache.js b/src/cache.js index 132a0ab..412cc1a 100644 --- a/src/cache.js +++ b/src/cache.js @@ -337,9 +337,12 @@ var cache = { } }, warmupLeaders: (cb) => { - db.collection('accounts').find( - {pub_leader: {$exists:true}} - ).toArray((e,accs) => { + db.collection('accounts').find({ + $and: [ + {pub_leader: {$exists:true}}, + {pub_leader: {$ne: ""}} + ] + }).toArray((e,accs) => { if (e) throw e for (let i in accs) { cache.leaders[accs[i].name] = 1 From 5abdd236a8ada41e605361db66b77182ec414b48 Mon Sep 17 00:00:00 2001 From: techcoderx Date: Sun, 20 Jun 2021 12:43:03 +0800 Subject: [PATCH 12/33] eco.round and eco.floor helpers --- src/chain.js | 4 ++-- src/economics.js | 24 +++++++++++++----------- src/transaction.js | 4 ++-- 3 files changed, 17 insertions(+), 15 deletions(-) diff --git a/src/chain.js b/src/chain.js index ea79294..9f7051f 100644 --- a/src/chain.js +++ b/src/chain.js @@ -321,8 +321,8 @@ chain = { if (chain.nextOutput.txs>1) output += 's' - output += ' dist: '+chain.nextOutput.dist - output += ' burn: '+chain.nextOutput.burn + output += ' dist: '+eco.round(chain.nextOutput.dist) + output += ' burn: '+eco.round(chain.nextOutput.burn) output += ' delay: '+ (currentOutTime - block.timestamp) if (block.missedBy && !rebuilding) diff --git a/src/economics.js b/src/economics.js index 3ea6939..ebe241a 100644 --- a/src/economics.js +++ b/src/economics.js @@ -86,10 +86,10 @@ var eco = { distributed += eco.currentBlock.dist votes += eco.currentBlock.votes - avail = Math.round(avail*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) - burned = Math.round(burned*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) - distributed = Math.round(distributed*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) - votes = Math.round(votes*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) + avail = eco.round(avail) + burned = eco.round(burned) + distributed = eco.round(distributed) + votes = eco.round(votes) return { theo: theoricalPool, burn: burned, @@ -137,14 +137,14 @@ var eco = { var won = thNewCoins * winners[i].share var rentabilityWinner = eco.rentability(winners[i].ts, currentVote.ts) won *= rentabilityWinner - won = Math.floor(won*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) + won = eco.floor(won) 
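                // eco.round()/eco.floor() (defined at the bottom of this file)
                // replace the repeated Math.round(x*10^p)/10^p idiom, rounding to
                // config.ecoClaimPrecision decimals. E.g. if ecoClaimPrecision
                // were 3: eco.floor(1.23456) -> 1.234, eco.round(1.23456) -> 1.235.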
winners[i].gross += won newCoins += won delete winners[i].share // logr.econ(winners[i].u+' wins '+won+' coins with rentability '+rentabilityWinner) } - newCoins = Math.round(newCoins*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) + newCoins = eco.round(newCoins) // reconstruct the votes array var newVotes = [] @@ -175,7 +175,7 @@ var eco = { } i-- } - newBurn = Math.round(newBurn*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) + newBurn = eco.round(newBurn) logr.econ(newCoins + ' dist from the vote') logr.econ(newBurn + ' burn from the vote') @@ -207,9 +207,9 @@ var eco = { // add dist/burn/votes to currentBlock eco stats eco.currentBlock.dist += newCoins - eco.currentBlock.dist = Math.round(eco.currentBlock.dist*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) + eco.currentBlock.dist = eco.round(eco.currentBlock.dist) eco.currentBlock.burn += newBurn - eco.currentBlock.burn = Math.round(eco.currentBlock.burn*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) + eco.currentBlock.burn = eco.round(eco.currentBlock.burn) eco.currentBlock.votes += currentVote.vt // updating the content @@ -268,7 +268,7 @@ var eco = { thNewCoins = stats.avail * Math.abs((vt) / stats.votes) // rounding down - thNewCoins = Math.floor(thNewCoins*Math.pow(10, config.ecoClaimPrecision))/Math.pow(10, config.ecoClaimPrecision) + thNewCoins = eco.floor(thNewCoins) // and making sure one person cant empty the whole pool when network has been inactive // e.g. when stats.votes close to 0 @@ -315,7 +315,9 @@ var eco = { rentability = Math.floor(rentability*Math.pow(10, config.ecoRentPrecision))/Math.pow(10, config.ecoRentPrecision) return rentability - } + }, + round: (val = 0) => Math.round(val*Math.pow(10,config.ecoClaimPrecision))/Math.pow(10,config.ecoClaimPrecision), + floor: (val = 0) => Math.floor(val*Math.pow(10,config.ecoClaimPrecision))/Math.pow(10,ecoClaimPrecision) } module.exports = eco \ No newline at end of file diff --git a/src/transaction.js b/src/transaction.js index 3eda56c..82720de 100644 --- a/src/transaction.js +++ b/src/transaction.js @@ -118,9 +118,9 @@ transaction = { cb(false, 'invalid tx hash does not match'); return } // checking transaction signature - chain.isValidSignature(tx.sender, tx.type, tx.hash, tx.signature, function(legitUser) { + chain.isValidSignature(tx.sender, tx.type, tx.hash, tx.signature, function(legitUser,e) { if (!legitUser) { - cb(false, 'invalid signature'); return + cb(false, e || 'invalid signature'); return } if (!legitUser.bw) { cb(false, 'user has no bandwidth object'); return From e7f1eb8f7b74d30ee8177a6cdc32beba53e569ab Mon Sep 17 00:00:00 2001 From: techcoderx Date: Sun, 20 Jun 2021 15:19:00 +0800 Subject: [PATCH 13/33] f --- src/economics.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/economics.js b/src/economics.js index ebe241a..3ed98a4 100644 --- a/src/economics.js +++ b/src/economics.js @@ -317,7 +317,7 @@ var eco = { return rentability }, round: (val = 0) => Math.round(val*Math.pow(10,config.ecoClaimPrecision))/Math.pow(10,config.ecoClaimPrecision), - floor: (val = 0) => Math.floor(val*Math.pow(10,config.ecoClaimPrecision))/Math.pow(10,ecoClaimPrecision) + floor: (val = 0) => Math.floor(val*Math.pow(10,config.ecoClaimPrecision))/Math.pow(10,config.ecoClaimPrecision) } module.exports = eco \ No newline at end of file From de83216fb61ef10c1d56b868c5d4e49e050511cf Mon Sep 17 00:00:00 2001 From: techcoderx Date: Sun, 
20 Jun 2021 16:50:00 +0800 Subject: [PATCH 14/33] optimized reward pool state --- src/chain.js | 7 +++-- src/economics.js | 71 +++++++++++++++++++++++++++++++++++++----------- src/mongo.js | 1 + 3 files changed, 61 insertions(+), 18 deletions(-) diff --git a/src/chain.js b/src/chain.js index 9f7051f..6d849ee 100644 --- a/src/chain.js +++ b/src/chain.js @@ -249,7 +249,7 @@ chain = { db.collection('blocks').insertOne(block, function(err) { if (err) throw err // push cached accounts and contents to mongodb - + eco.appendHistory(block) chain.cleanMemory() // update the config if an update was scheduled @@ -771,6 +771,7 @@ chain = { cleanMemory: () => { chain.cleanMemoryBlocks() chain.cleanMemoryTx() + eco.cleanHistory() }, cleanMemoryBlocks: () => { if (config.ecoBlocksIncreasesSoon) { @@ -780,7 +781,7 @@ chain = { var extraBlocks = chain.recentBlocks.length - config.ecoBlocks while (extraBlocks > 0) { - chain.recentBlocks.splice(0,1) + chain.recentBlocks.shift() extraBlocks-- } }, @@ -805,6 +806,7 @@ chain = { // Genesis block is handled differently if (blockNum === 0) { + eco.history = [{_id: 0, votes: 0, cDist: 0, cBurn: 0}] chain.recentBlocks = [chain.getGenesisBlock()] chain.minerSchedule(chain.getGenesisBlock(),(sch) => { chain.schedule = sch @@ -842,6 +844,7 @@ chain = { // update the config if an update was scheduled config = require('./config.js').read(blockToRebuild._id) eco.nextBlock() + eco.appendHistory(blockToRebuild) chain.cleanMemory() let writeInterval = parseInt(process.env.REBUILD_WRITE_INTERVAL) diff --git a/src/economics.js b/src/economics.js index 3ed98a4..0c4b7d9 100644 --- a/src/economics.js +++ b/src/economics.js @@ -25,6 +25,7 @@ var eco = { burn: 0, votes: 0 }, + history: [], nextBlock: () => { eco.currentBlock.dist = 0 eco.currentBlock.burn = 0 @@ -36,30 +37,68 @@ var eco = { inflation: () => { return config.rewardPoolMult * config.rewardPoolUsers + config.rewardPoolMin }, + loadHistory: () => { + eco.history = [] + let lastCBurn = 0 + let lastCDist = 0 + let firstBlockIndex = chain.recentBlocks.length - config.ecoBlocks + if (firstBlockIndex < 0) firstBlockIndex = 0 + for (let i = firstBlockIndex; i < chain.recentBlocks.length; i++) { + const block = chain.recentBlocks[i] + if (block.burn) + lastCBurn += block.burn + if (block.dist) + lastCDist += block.dist + + eco.history.push({_id: block._id, votes: eco.tallyVotes(block.txs)}) + } + + eco.history[eco.history.length-1].cDist = eco.round(lastCDist) + eco.history[eco.history.length-1].cBurn = eco.round(lastCBurn) + }, + appendHistory: (nextBlock) => { + // nextBlock should yet to be added to recentBlocks + let lastIdx = chain.recentBlocks.length-config.ecoBlocks + let oldDist = lastIdx >= 0 ? chain.recentBlocks[lastIdx].dist || 0 : 0 + let oldBurn = lastIdx >= 0 ? 
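        // cDist/cBurn are running sums over the trailing ecoBlocks window, so
        // rewardPool() no longer rescans every recent block. appendHistory()
        // slides the window one block forward: subtract the dist/burn of the block
        // dropping out (ecoBlocks back), add the incoming block's. Illustrative:
        //   previous cDist = 120.5, leaving block dist = 0.8, new block dist = 1.1
        //   next cDist = 120.5 - 0.8 + 1.1 = 120.8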
chain.recentBlocks[lastIdx].burn || 0 : 0 + eco.history.push({ + _id: nextBlock._id, + votes: eco.tallyVotes(nextBlock.txs), + cDist: eco.round(eco.history[eco.history.length-1].cDist - oldDist + (nextBlock.dist || 0)), + cBurn: eco.round(eco.history[eco.history.length-1].cBurn - oldBurn + (nextBlock.burn || 0)) + }) + }, + cleanHistory: () => { + if (config.ecoBlocksIncreasesSoon) return + let extraBlocks = eco.history.length - config.ecoBlocks + while (extraBlocks > 0) { + eco.history.shift() + extraBlocks-- + } + }, + tallyVotes: (txs = []) => { + let votes = 0 + for (let y = 0; y < txs.length; y++) + if (txs[y].type === TransactionType.VOTE + || txs[y].type === TransactionType.COMMENT + || txs[y].type === TransactionType.PROMOTED_COMMENT + || (txs[y].type === TransactionType.TIPPED_VOTE && config.hotfix1)) + votes += Math.abs(txs[y].data.vt) + return votes + }, rewardPool: () => { let theoricalPool = eco.inflation() let burned = 0 let distributed = 0 let votes = 0 if (!eco.startRewardPool) { - let firstBlockIndex = chain.recentBlocks.length - config.ecoBlocks + distributed = eco.history[eco.history.length-1].cDist + burned = eco.history[eco.history.length-1].cBurn + let firstBlockIndex = eco.history.length - config.ecoBlocks if (firstBlockIndex < 0) firstBlockIndex = 0 let weight = 1 - for (let i = firstBlockIndex; i < chain.recentBlocks.length; i++) { - const block = chain.recentBlocks[i] - if (block.burn) - burned += block.burn - if (block.dist) - distributed += block.dist - - for (let y = 0; y < block.txs.length; y++) { - let tx = block.txs[y] - if (tx.type === TransactionType.VOTE - || tx.type === TransactionType.COMMENT - || tx.type === TransactionType.PROMOTED_COMMENT - || (tx.type === TransactionType.TIPPED_VOTE && config.hotfix1)) - votes += Math.abs(tx.data.vt)*weight - } + for (let i = firstBlockIndex; i < eco.history.length; i++) { + votes += eco.history[i].votes*weight weight++ } diff --git a/src/mongo.js b/src/mongo.js index b28f632..4aaec36 100644 --- a/src/mongo.js +++ b/src/mongo.js @@ -154,6 +154,7 @@ var mongo = { }).toArray(function(err, blocks) { if (err) throw err chain.recentBlocks = blocks.reverse() + eco.loadHistory() cb() }) }, From f88f521442e5234b9ff8228bf0a237c8be62d467 Mon Sep 17 00:00:00 2001 From: techcoderx Date: Sun, 20 Jun 2021 20:26:49 +0800 Subject: [PATCH 15/33] reset eco.history on hf3 --- src/chain.js | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/chain.js b/src/chain.js index 6d849ee..ba4dff2 100644 --- a/src/chain.js +++ b/src/chain.js @@ -249,12 +249,12 @@ chain = { db.collection('blocks').insertOne(block, function(err) { if (err) throw err // push cached accounts and contents to mongodb - eco.appendHistory(block) chain.cleanMemory() // update the config if an update was scheduled config = require('./config.js').read(block._id) - + chain.applyHardfork(block._id) + eco.appendHistory(block) eco.nextBlock() if (!p2p.recovering) { @@ -790,6 +790,11 @@ chain = { if (chain.recentTxs[hash].ts + config.txExpirationTime < chain.getLatestBlock().timestamp) delete chain.recentTxs[hash] }, + applyHardfork: (blockNum) => { + // Update memory state on hardfork execution + if (blockNum === 4860000) + eco.loadHistory() // reset previous votes + }, batchLoadBlocks: (blockNum,cb) => { if (chain.blocksToRebuild.length == 0) { db.collection('blocks').find({_id: { $gte: blockNum, $lt: blockNum+max_batch_blocks }}).toArray((e,blocks) => { @@ -843,6 +848,7 @@ chain = { // update the config if an update was scheduled config = 
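            // applyHardfork() replays hardfork side effects at the right height
            // during a rebuild; its only case so far is block 4860000 (hotfix1),
            // where eco.loadHistory() is rerun so eco.history's vote tallies are
            // recomputed under the post-hotfix counting rules: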
require('./config.js').read(blockToRebuild._id) + chain.applyHardfork(blockToRebuild._id) eco.nextBlock() eco.appendHistory(blockToRebuild) chain.cleanMemory() From c2ed775f8fed40dee5c03f1f190cc6ce142bdef8 Mon Sep 17 00:00:00 2001 From: techcoderx Date: Fri, 25 Jun 2021 14:39:38 +0800 Subject: [PATCH 16/33] reduce callbacks in main.js --- src/cache.js | 23 +++++++------ src/main.js | 91 +++++++++++++++++++++++++--------------------------- src/mongo.js | 13 ++++---- 3 files changed, 62 insertions(+), 65 deletions(-) diff --git a/src/cache.js b/src/cache.js index 412cc1a..ac6f50f 100644 --- a/src/cache.js +++ b/src/cache.js @@ -301,11 +301,10 @@ var cache = { return '_id' } }, - warmup: function(collection, maxDoc, cb) { - if (!collection || !maxDoc || maxDoc === 0) { - cb(null) - return - } + warmup: (collection, maxDoc) => new Promise((rs,rj) => { + if (!collection || !maxDoc || maxDoc === 0) + return rs(null) + switch (collection) { case 'accounts': db.collection(collection).find({}, { @@ -315,7 +314,7 @@ var cache = { if (err) throw err for (let i = 0; i < accounts.length; i++) cache[collection][accounts[i].name] = accounts[i] - cb(null) + rs(null) }) break @@ -327,16 +326,16 @@ var cache = { if (err) throw err for (let i = 0; i < contents.length; i++) cache[collection][contents[i]._id] = contents[i] - cb(null) + rs(null) }) break default: - cb('Collection type not found') + rj('Collection type not found') break } - }, - warmupLeaders: (cb) => { + }), + warmupLeaders: () => new Promise((rs) => { db.collection('accounts').find({ $and: [ {pub_leader: {$exists:true}}, @@ -349,9 +348,9 @@ var cache = { if (!cache.accounts[accs[i].name]) cache.accounts[accs[i].name] = accs[i] } - cb(accs.length) + rs(accs.length) }) - } + }) } module.exports = cache \ No newline at end of file diff --git a/src/main.js b/src/main.js index 34f6830..c4e19c6 100644 --- a/src/main.js +++ b/src/main.js @@ -23,54 +23,51 @@ if (allowNodeV.indexOf(currentNodeV) === -1) { erroredRebuild = false // init the database and load most recent blocks in memory directly -mongo.init(function() { - var timeStart = new Date().getTime() - cache.warmup('accounts', parseInt(process.env.WARMUP_ACCOUNTS), function(err) { - if (err) throw err - logr.info(Object.keys(cache.accounts).length+' acccounts loaded in RAM in '+(new Date().getTime()-timeStart)+' ms') - timeStart = new Date().getTime() - - cache.warmup('contents', parseInt(process.env.WARMUP_CONTENTS), function(err) { - if (err) throw err - logr.info(Object.keys(cache.contents).length+' contents loaded in RAM in '+(new Date().getTime()-timeStart)+' ms') - timeStart = new Date().getTime() - - cache.warmupLeaders((leaderCount)=>{ - logr.info(leaderCount+' leaders loaded in RAM in '+(new Date().getTime()-timeStart)+' ms') - - // Rebuild chain state if specified. This verifies the integrity of every block and transactions and rebuild the state. 
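    // cache.warmup()/cache.warmupLeaders() now return Promises instead of taking
    // callbacks, which is what lets the replacement init path below flatten the
    // old nested callbacks into sequential awaits, e.g.:
    //   await cache.warmup('accounts', parseInt(process.env.WARMUP_ACCOUNTS))
    //   let leaderCount = await cache.warmupLeaders()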
- let rebuildResumeBlock = parseInt(process.env.REBUILD_RESUME_BLK)
- let isResumingRebuild = !isNaN(rebuildResumeBlock) && rebuildResumeBlock > 0
- if ((process.env.REBUILD_STATE === '1' || process.env.REBUILD_STATE === 1) && !isResumingRebuild) {
- logr.info('Chain state rebuild requested, unzipping blocks.zip...')
- mongo.restoreBlocks((e)=>{
- if (e) return logr.error(e)
- startRebuild(0)
- })
- return
- }
-
- mongo.lastBlock(function(block) {
- // Resuming an interrupted rebuild
- if (isResumingRebuild) {
- logr.info('Resuming interrupted rebuild from block ' + rebuildResumeBlock)
- config = require('./config').read(rebuildResumeBlock - 1)
- chain.restoredBlocks = block._id
- mongo.fillInMemoryBlocks(() =>
- db.collection('blocks').findOne({_id:rebuildResumeBlock-1 - (rebuildResumeBlock-1)%config.leaders},(e,b) =>
- chain.minerSchedule(b,(sch) => {
- chain.schedule = sch
- startRebuild(rebuildResumeBlock)
- })),rebuildResumeBlock)
- return
- }
- logr.info('#' + block._id + ' is the latest block in our db')
- config = require('./config.js').read(block._id)
- mongo.fillInMemoryBlocks(startDaemon)
- })
- })
- })
- })
+mongo.init(async function() {
+ // Warmup accounts
+ let timeStart = new Date().getTime()
+ await cache.warmup('accounts', parseInt(process.env.WARMUP_ACCOUNTS))
+ logr.info(Object.keys(cache.accounts).length+' accounts loaded in RAM in '+(new Date().getTime()-timeStart)+' ms')
+
+ // Warmup contents
+ timeStart = new Date().getTime()
+ await cache.warmup('contents', parseInt(process.env.WARMUP_CONTENTS))
+ logr.info(Object.keys(cache.contents).length+' contents loaded in RAM in '+(new Date().getTime()-timeStart)+' ms')
+
+ // Warmup leaders
+ timeStart = new Date().getTime()
+ let leaderCount = await cache.warmupLeaders()
+ logr.info(leaderCount+' leaders loaded in RAM in '+(new Date().getTime()-timeStart)+' ms')
+
+ // Rebuild chain state if specified. This verifies the integrity of every block and transaction and rebuilds the state. 
+ let rebuildResumeBlock = parseInt(process.env.REBUILD_RESUME_BLK) + let isResumingRebuild = !isNaN(rebuildResumeBlock) && rebuildResumeBlock > 0 + if ((process.env.REBUILD_STATE === '1' || process.env.REBUILD_STATE === 1) && !isResumingRebuild) { + logr.info('Chain state rebuild requested, unzipping blocks.zip...') + mongo.restoreBlocks((e)=>{ + if (e) return logr.error(e) + startRebuild(0) }) - }) + return + } + + let block = await mongo.lastBlock() + // Resuming an interrupted rebuild + if (isResumingRebuild) { + logr.info('Resuming interrupted rebuild from block ' + rebuildResumeBlock) + config = require('./config').read(rebuildResumeBlock - 1) + chain.restoredBlocks = block._id + mongo.fillInMemoryBlocks(() => + db.collection('blocks').findOne({_id:rebuildResumeBlock-1 - (rebuildResumeBlock-1)%config.leaders},(e,b) => + chain.minerSchedule(b,(sch) => { + chain.schedule = sch + startRebuild(rebuildResumeBlock) + })),rebuildResumeBlock) + return + } + logr.info('#' + block._id + ' is the latest block in our db') + config = require('./config.js').read(block._id) + mongo.fillInMemoryBlocks(startDaemon) }) function startRebuild(startBlock) { diff --git a/src/mongo.js b/src/mongo.js index 4aaec36..0fadabe 100644 --- a/src/mongo.js +++ b/src/mongo.js @@ -158,14 +158,14 @@ var mongo = { cb() }) }, - lastBlock: (cb) => { + lastBlock: () => new Promise((rs,rj) => { db.collection('blocks').findOne({}, { sort: {_id: -1} }, function(err, block) { - if (err) throw err - cb(block) + if (err) return rj(err) + rs(block) }) - }, + }), restoreBlocks: (cb) => { let dump_dir = process.cwd() + '/dump' let dump_location = dump_dir + '/blocks.zip' @@ -194,7 +194,8 @@ var mongo = { } }) - mongorestore.on('close', () => db.collection('blocks').findOne({_id: 0}, (gError,gBlock) => mongo.lastBlock((block) => { + mongorestore.on('close', () => db.collection('blocks').findOne({_id: 0}, async (gError,gBlock) => { + let block = await mongo.lastBlock() if (gError) throw gError if (!gBlock) return cb('Genesis block not found in dump') if (gBlock.hash !== config.originHash)return cb('Genesis block hash in dump does not match config.originHash') @@ -202,7 +203,7 @@ var mongo = { logr.info('Finished importing ' + block._id + ' blocks') chain.restoredBlocks = block._id cb(null) - }))) + })) }) } } From 988d0dab072ab1dea279e8d4a12aab6b882da69e Mon Sep 17 00:00:00 2001 From: techcoderx Date: Fri, 25 Jun 2021 15:15:41 +0800 Subject: [PATCH 17/33] remove callback on minerSchedule() --- src/chain.js | 84 ++++++++++++++++------------------------------------ src/main.js | 13 ++++---- 2 files changed, 31 insertions(+), 66 deletions(-) diff --git a/src/chain.js b/src/chain.js index ba4dff2..3bc507b 100644 --- a/src/chain.js +++ b/src/chain.js @@ -259,40 +259,22 @@ chain = { if (!p2p.recovering) { // if block id is mult of n leaders, reschedule next n blocks - if (block._id % config.leaders === 0) - chain.minerSchedule(block, function(minerSchedule) { - chain.schedule = minerSchedule - chain.recentBlocks.push(block) - chain.minerWorker(block) - chain.output(block) - cache.writeToDisk(function() {}) - cb(true) - }) - else { - chain.recentBlocks.push(block) - chain.minerWorker(block) - chain.output(block) - cache.writeToDisk(function() {}) - cb(true) - } + if (block._id % config.leaders === 0) + chain.schedule = chain.minerSchedule(block) + chain.recentBlocks.push(block) + chain.minerWorker(block) + chain.output(block) + cache.writeToDisk(function() {}) + cb(true) } else { // if we are recovering we wait for mongo to update 
cache.writeToDisk(function() { if (block._id % config.leaders === 0) - chain.minerSchedule(block, function(minerSchedule) { - chain.schedule = minerSchedule - chain.recentBlocks.push(block) - chain.minerWorker(block) - chain.output(block) - - cb(true) - }) - else { - chain.recentBlocks.push(block) - chain.minerWorker(block) - chain.output(block) - cb(true) - } + chain.schedule = chain.minerSchedule(block) + chain.recentBlocks.push(block) + chain.minerWorker(block) + chain.output(block) + cb(true) }) } }) @@ -637,7 +619,7 @@ chain = { }) }) }, - minerSchedule: (block, cb) => { + minerSchedule: (block) => { var hash = block.hash var rand = parseInt('0x'+hash.substr(hash.length-config.leaderShufflePrecision)) if (!p2p.recovering) @@ -661,10 +643,10 @@ chain = { y++ } - cb({ + return { block: block, shuffle: shuffledMiners - }) + } }, generateLeaders: (withLeaderPub, limit, start) => { var leaders = [] @@ -813,10 +795,8 @@ chain = { if (blockNum === 0) { eco.history = [{_id: 0, votes: 0, cDist: 0, cBurn: 0}] chain.recentBlocks = [chain.getGenesisBlock()] - chain.minerSchedule(chain.getGenesisBlock(),(sch) => { - chain.schedule = sch - chain.rebuildState(blockNum+1,cb) - }) + chain.schedule = chain.minerSchedule(chain.getGenesisBlock()) + chain.rebuildState(blockNum+1,cb) return } @@ -859,27 +839,15 @@ chain = { cache.processRebuildOps(() => { if (blockToRebuild._id % config.leaders === 0) - chain.minerSchedule(blockToRebuild, function(minerSchedule) { - chain.schedule = minerSchedule - chain.recentBlocks.push(blockToRebuild) - chain.output(blockToRebuild, true) - - // process notifications (non blocking) - notifications.processBlock(blockToRebuild) - - // next block - chain.rebuildState(blockNum+1, cb) - }) - else { - chain.recentBlocks.push(blockToRebuild) - chain.output(blockToRebuild, true) - - // process notifications (non blocking) - notifications.processBlock(blockToRebuild) - - // next block - chain.rebuildState(blockNum+1, cb) - } + chain.schedule = chain.minerSchedule(blockToRebuild) + chain.recentBlocks.push(blockToRebuild) + chain.output(blockToRebuild, true) + + // process notifications (non blocking) + notifications.processBlock(blockToRebuild) + + // next block + chain.rebuildState(blockNum+1, cb) }, blockToRebuild._id % writeInterval === 0) }) }) diff --git a/src/main.js b/src/main.js index c4e19c6..53e0465 100644 --- a/src/main.js +++ b/src/main.js @@ -58,11 +58,10 @@ mongo.init(async function() { config = require('./config').read(rebuildResumeBlock - 1) chain.restoredBlocks = block._id mongo.fillInMemoryBlocks(() => - db.collection('blocks').findOne({_id:rebuildResumeBlock-1 - (rebuildResumeBlock-1)%config.leaders},(e,b) => - chain.minerSchedule(b,(sch) => { - chain.schedule = sch - startRebuild(rebuildResumeBlock) - })),rebuildResumeBlock) + db.collection('blocks').findOne({_id:rebuildResumeBlock-1 - (rebuildResumeBlock-1)%config.leaders},(e,b) => { + chain.schedule = chain.minerSchedule(b) + startRebuild(rebuildResumeBlock) + }),rebuildResumeBlock) return } logr.info('#' + block._id + ' is the latest block in our db') @@ -95,9 +94,7 @@ function startDaemon() { // start miner schedule db.collection('blocks').findOne({_id: chain.getLatestBlock()._id - (chain.getLatestBlock()._id % config.leaders)}, function(err, block) { if (err) throw err - chain.minerSchedule(block, function(minerSchedule) { - chain.schedule = minerSchedule - }) + chain.schedule = chain.minerSchedule(block) }) // init hot/trending From 3a6208113c4da82c1f74250ab519ecb2eadcf213 Mon Sep 17 00:00:00 2001 From: 
techcoderx
Date: Fri, 25 Jun 2021 16:35:19 +0800
Subject: [PATCH 18/33] leader stats from extended api

---
 src/cache.js | 15 +++---
 src/chain.js | 6 ++-
 src/http/leader/index.js | 25 +++++++++
 src/http/rank/index.js | 61 +++++++++++++++++++++
 src/leaderStats.js | 111 +++++++++++++++++++++++++++++++++++++++
 src/main.js | 4 ++
 6 files changed, 211 insertions(+), 11 deletions(-)
 create mode 100644 src/http/leader/index.js
 create mode 100644 src/http/rank/index.js
 create mode 100644 src/leaderStats.js

diff --git a/src/cache.js b/src/cache.js
index ac6f50f..f824fa2 100644
--- a/src/cache.js
+++ b/src/cache.js
@@ -248,15 +248,12 @@ var cache = {
 })
 })
 
- // no operation compression (dumb and slow)
- // for (let i = 0; i < cache.changes.length; i++) {
- // executions.push(function(callback) {
- // var change = cache.changes[i]
- // db.collection(change.collection).updateOne(change.query, change.changes, function() {
- // callback()
- // })
- // })
- // }
+ // leader stats
+ if (process.env.LEADER_STATS === '1') {
+ let leaderStatsWriteOps = leaderStats.getWriteOps()
+ for (let op in leaderStatsWriteOps)
+ executions.push(leaderStatsWriteOps[op])
+ }
 
 var timeBefore = new Date().getTime()
 parallel(executions, function(err, results) {
diff --git a/src/chain.js b/src/chain.js
index 3bc507b..2b42efa 100644
--- a/src/chain.js
+++ b/src/chain.js
@@ -190,8 +190,9 @@ chain = {
 if (!p2p.recovering)
 p2p.broadcastBlock(newBlock)
 
- // process notifications (non blocking)
+ // process notifications and leader stats (non blocking)
 notifications.processBlock(newBlock)
+ leaderStats.processBlock(newBlock)
 
 // emit event to confirm new transactions in the http api
 for (let i = 0; i < newBlock.txs.length; i++)
@@ -843,8 +844,9 @@
 chain.recentBlocks.push(blockToRebuild)
 chain.output(blockToRebuild, true)
 
- // process notifications (non blocking)
+ // process notifications and leader stats (non blocking)
 notifications.processBlock(blockToRebuild)
+ leaderStats.processBlock(blockToRebuild)
 
 // next block
 chain.rebuildState(blockNum+1, cb)
diff --git a/src/http/leader/index.js b/src/http/leader/index.js
new file mode 100644
index 0000000..8a76891
--- /dev/null
+++ b/src/http/leader/index.js
@@ -0,0 +1,25 @@
+module.exports = {
+ init: (app) => {
+ app.get('/leader/:account',(req,res) => {
+ if (!req.params.account)
+ return res.status(404).send({error: 'account is required'})
+ db.collection('accounts').findOne({name: req.params.account}, (e,acc) => {
+ if (e) return res.status(500).send(e)
+ if (!acc) return res.status(404).send({error: 'account does not exist'})
+ if (!acc.pub_leader) return res.status(404).send({error: 'account does not contain a leader key'})
+ res.send({
+ name: acc.name,
+ balance: acc.balance,
+ node_appr: acc.node_appr,
+ pub_leader: acc.pub_leader,
+ subs: acc.followers.length,
+ subbed: acc.follows.length,
+ produced: leaderStats.leaders[acc.name].produced,
+ missed: leaderStats.leaders[acc.name].missed,
+ voters: leaderStats.leaders[acc.name].voters,
+ last: leaderStats.leaders[acc.name].last
+ })
+ })
+ })
+ }
+}
\ No newline at end of file
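For reference, a minimal consumer sketch for the new /leader endpoint above (illustrative only, not part of the patch: it assumes the node runs with LEADER_STATS=1, that the API listens on its usual HTTP port — 3001 here — and uses 'dtube' as a sample account name):

const http = require('http')
// response fields mirror the res.send() object above; produced, missed,
// voters and last come from the leaderStats indexer introduced in this patch
http.get('http://localhost:3001/leader/dtube', (res) => {
    let body = ''
    res.on('data', (chunk) => body += chunk)
    res.on('end', () => {
        const leader = JSON.parse(body)
        console.log(leader.name, leader.node_appr, leader.produced, leader.missed)
    })
})
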
diff --git a/src/http/rank/index.js b/src/http/rank/index.js
new file mode 100644
index 0000000..a394de7
--- /dev/null
+++ b/src/http/rank/index.js
@@ -0,0 +1,61 @@
+module.exports = {
+ init: (app) => {
+ app.get('/rank/:key',(req,res) => {
+ let sorting = {$sort: {}}
+ let projecting = {
+ $project: {
+ _id: 0,
+ name: 1,
+ balance: 1,
+ subs: { $size: "$followers" },
+ subbed: { $size: "$follows" }
+ }
+ }
+ let matching = {$match:{}}
+ switch (req.params.key) {
+ case 'balance':
+ sorting.$sort.balance = -1
+ break
+ case 'subs':
+ sorting.$sort.subs = -1
+ break
+ case 'leaders':
+ if (process.env.LEADER_STATS !== '1')
+ return res.status(500).send({error: 'Leader stats module is disabled by node operator'})
+ projecting.$project.node_appr = 1
+ projecting.$project.pub_leader = 1
+ projecting.$project.hasVote = {
+ $gt: ['$node_appr',0]
+ }
+ sorting.$sort.node_appr = -1
+ matching.$match.hasVote = true
+ matching.$match.pub_leader = { $exists: true }
+ matching.$match.pub_leader = { $ne: '' }
+ break
+ default:
+ return res.status(400).send({error: 'invalid key'})
+ }
+
+ let aggregation = [projecting, sorting, {$limit: 100}]
+ if (req.params.key == 'leaders')
+ aggregation.splice(1, 0, matching) // $match must run before $sort/$limit so the top 100 are all active leaders
+
+ db.collection('accounts').aggregate(aggregation).toArray((e,r) => {
+ if (e)
+ return res.status(500).send(e)
+ if (req.params.key != 'leaders')
+ return res.send(r)
+ else {
+ for (let leader = 0; leader < r.length; leader++) {
+ delete r[leader].hasVote
+ r[leader].produced = leaderStats.leaders[r[leader].name].produced
+ r[leader].missed = leaderStats.leaders[r[leader].name].missed
+ r[leader].voters = leaderStats.leaders[r[leader].name].voters
+ r[leader].last = leaderStats.leaders[r[leader].name].last
+ }
+ res.send(r)
+ }
+ })
+ })
+ }
+}
\ No newline at end of file
diff --git a/src/leaderStats.js b/src/leaderStats.js
new file mode 100644
index 0000000..b3d4564
--- /dev/null
+++ b/src/leaderStats.js
@@ -0,0 +1,111 @@
+const parallel = require('run-parallel')
+
+// leader indexer from extended api
+let indexer = {
+ leaders: {
+ dtube: {
+ produced: 1,
+ missed: 0,
+ voters: 1, // genesis
+ last: 0
+ }
+ },
+ updates: {
+ leaders: []
+ },
+ processBlock: (block) => {
+ if (process.env.LEADER_STATS !== '1') return
+ if (!block)
+ throw new Error('cannot process undefined block')
+
+ // Setup new leader accounts
+ if (!indexer.leaders[block.miner])
+ indexer.leaders[block.miner] = {
+ produced: 0,
+ missed: 0,
+ voters: 0,
+ last: 0
+ }
+ if (block.missedBy && !indexer.leaders[block.missedBy])
+ indexer.leaders[block.missedBy] = {
+ produced: 0,
+ missed: 0,
+ voters: 0,
+ last: 0
+ }
+
+ // Increment produced/missed
+ indexer.leaders[block.miner].produced += 1
+ indexer.leaders[block.miner].last = block._id
+ if (block.missedBy) indexer.leaders[block.missedBy].missed += 1
+
+ if (!indexer.updates.leaders.includes(block.miner))
+ indexer.updates.leaders.push(block.miner)
+
+ if (block.missedBy && !indexer.updates.leaders.includes(block.missedBy))
+ indexer.updates.leaders.push(block.missedBy)
+
+ // Look for approves/disapproves in tx
+ for (let i = 0; i < block.txs.length; i++)
+ if (block.txs[i].type === 1) {
+ // APPROVE_NODE_OWNER
+ if (!indexer.leaders[block.txs[i].data.target]) indexer.leaders[block.txs[i].data.target] = {
+ produced: 0,
+ missed: 0,
+ voters: 0,
+ last: 0
+ }
+ indexer.leaders[block.txs[i].data.target].voters += 1
+ if (!indexer.updates.leaders.includes(block.txs[i].data.target))
+ indexer.updates.leaders.push(block.txs[i].data.target)
+ } else if (block.txs[i].type === 2) {
+ // DISAPPROVE_NODE_OWNER
+ if (!indexer.leaders[block.txs[i].data.target]) indexer.leaders[block.txs[i].data.target] = {
+ produced: 0,
+ missed: 0,
+ voters: 0,
+ last: 0
+ }
+ indexer.leaders[block.txs[i].data.target].voters -= 1
+ if (!indexer.updates.leaders.includes(block.txs[i].data.target))
+ indexer.updates.leaders.push(block.txs[i].data.target)
+ } else if (block.txs[i].type === 18 && !indexer.leaders[block.txs[i].sender]) {
+ // ENABLE_NODE
+ indexer.leaders[block.txs[i].sender] = {
+ produced: 0,
+ missed: 0,
+ voters: 0,
+ last: 0
+ }
+ if (!indexer.updates.leaders.includes(block.txs[i].sender))
+ indexer.updates.leaders.push(block.txs[i].sender)
+ }
+ },
+ getWriteOps: () => {
+ if (process.env.LEADER_STATS !== '1') return []
+ let ops = []
+ for (let acc in indexer.updates.leaders) {
+ let updatedLeader = indexer.updates.leaders[acc]
+ ops.push((cb) => db.collection('leaders').updateOne({_id: updatedLeader },{
+ $set: indexer.leaders[updatedLeader]
+ },{ upsert: true },(e) => cb(null,true)))
+ }
+ indexer.updates.leaders = []
+ return ops
+ },
+ loadIndex: () => {
+ return new Promise((rs,rj) => {
+ if (process.env.LEADER_STATS !== '1') return rs()
+ db.collection('leaders').find({},{}).toArray((e,leaders) => {
+ if (e) return rj(e)
+ if (leaders) for (let i in leaders) {
+ indexer.leaders[leaders[i]._id] = leaders[i]
+ delete indexer.leaders[leaders[i]._id]._id
+ }
+ rs()
+ })
+ })
+ }
+}
+
+module.exports = indexer
\ No newline at end of file
diff --git a/src/main.js b/src/main.js
index 53e0465..f20e83e 100644
--- a/src/main.js
+++ b/src/main.js
@@ -11,6 +11,7 @@ validate = require('./validate')
 eco = require('./economics.js')
 rankings = require('./rankings.js')
 consensus = require('./consensus')
+leaderStats = require('./leaderStats')
 
 // verify node version
 var allowNodeV = [10, 12, 14]
@@ -38,6 +39,9 @@ mongo.init(async function() {
 timeStart = new Date().getTime()
 let leaderCount = await cache.warmupLeaders()
 logr.info(leaderCount+' leaders loaded in RAM in '+(new Date().getTime()-timeStart)+' ms')
+
+ // Warmup leader stats
+ await leaderStats.loadIndex()
 
 // Rebuild chain state if specified. This verifies the integrity of every block and transaction and rebuilds the state.
 let rebuildResumeBlock = parseInt(process.env.REBUILD_RESUME_BLK)

From 432c05aa1bfb267ed66a46a6601b256ae13c5fa7 Mon Sep 17 00:00:00 2001
From: techcoderx
Date: Fri, 25 Jun 2021 19:26:24 +0800
Subject: [PATCH 19/33] nodejs v16 support

---
 src/main.js | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/main.js b/src/main.js
index f20e83e..946601f 100644
--- a/src/main.js
+++ b/src/main.js
@@ -14,11 +14,13 @@ consensus = require('./consensus')
 leaderStats = require('./leaderStats')
 
 // verify node version
-var allowNodeV = [10, 12, 14]
+var allowNodeV = [10, 12, 14, 16]
 const currentNodeV = parseInt(process.versions.node.split('.')[0])
 if (allowNodeV.indexOf(currentNodeV) === -1) {
 logr.fatal('Wrong NodeJS version. Allowed versions: v'+allowNodeV.join(', v'))
 process.exit(1)
+} else if (currentNodeV === 10) {
+ logr.warn('NodeJS v10 has reached end of life, hence v10 support for Avalon will be removed in the future. 
Please upgrade to the latest supported NodeJS v' + allowNodeV[allowNodeV.length]) } else logr.info('Correctly using NodeJS v'+process.versions.node) erroredRebuild = false From 88750d5d534d3d46a0e413fc8d18f7f70e6ed180 Mon Sep 17 00:00:00 2001 From: techcoderx Date: Fri, 25 Jun 2021 21:41:18 +0800 Subject: [PATCH 20/33] $match fix in /rank/leader --- src/http/rank/index.js | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/http/rank/index.js b/src/http/rank/index.js index a394de7..2214ee3 100644 --- a/src/http/rank/index.js +++ b/src/http/rank/index.js @@ -29,8 +29,7 @@ module.exports = { } sorting.$sort.node_appr = -1 matching.$match.hasVote = true - matching.$match.pub_leader = { $exists: true } - matching.$match.pub_leader = { $ne: '' } + matching.$match.pub_leader = { $exists: true, $ne: '' } break default: return res.status(400).send({error: 'invalid key'}) From c8d1558e91641df0f848826edb66aacdf994e7aa Mon Sep 17 00:00:00 2001 From: techcoderx Date: Sun, 27 Jun 2021 09:22:46 +0800 Subject: [PATCH 21/33] fix invalid signature formats validation --- src/chain.js | 42 ++++++++++++++++++++++++------------------ 1 file changed, 24 insertions(+), 18 deletions(-) diff --git a/src/chain.js b/src/chain.js index 2b42efa..cde912a 100644 --- a/src/chain.js +++ b/src/chain.js @@ -374,15 +374,17 @@ chain = { return chain.isValidMultisig(account,threshold,allowedPubKeys,hash,sign,cb) // single signature - for (let i = 0; i < allowedPubKeys.length; i++) { - let bufferHash = Buffer.from(hash, 'hex') - let b58sign = bs58.decode(sign) - let b58pub = bs58.decode(allowedPubKeys[i][0]) - if (secp256k1.ecdsaVerify(b58sign, bufferHash, b58pub) && allowedPubKeys[i][1] >= threshold) { - cb(account) - return + try { + for (let i = 0; i < allowedPubKeys.length; i++) { + let bufferHash = Buffer.from(hash, 'hex') + let b58sign = bs58.decode(sign) + let b58pub = bs58.decode(allowedPubKeys[i][0]) + if (secp256k1.ecdsaVerify(b58sign, bufferHash, b58pub) && allowedPubKeys[i][1] >= threshold) { + cb(account) + return + } } - } + } catch {} cb(false) }) }, @@ -390,16 +392,20 @@ chain = { let validWeights = 0 let validSigs = [] let hashBuf = Buffer.from(hash, 'hex') - for (let s = 0; s < signatures.length; s++) { - let signBuf = bs58.decode(signatures[s][0]) - let recoveredPub = bs58.encode(secp256k1.ecdsaRecover(signBuf,signatures[s][1],hashBuf)) - if (validSigs.includes(recoveredPub)) - return cb(false, 'duplicate signatures found') - for (let p = 0; p < allowedPubKeys.length; p++) - if (allowedPubKeys[p][0] === recoveredPub) { - validWeights += allowedPubKeys[p][1] - validSigs.push(recoveredPub) - } + try { + for (let s = 0; s < signatures.length; s++) { + let signBuf = bs58.decode(signatures[s][0]) + let recoveredPub = bs58.encode(secp256k1.ecdsaRecover(signBuf,signatures[s][1],hashBuf)) + if (validSigs.includes(recoveredPub)) + return cb(false, 'duplicate signatures found') + for (let p = 0; p < allowedPubKeys.length; p++) + if (allowedPubKeys[p][0] === recoveredPub) { + validWeights += allowedPubKeys[p][1] + validSigs.push(recoveredPub) + } + } + } catch (e) { + return cb(false, 'invalid signatures: ' + e.toString()) } if (validWeights >= threshold) cb(account) From 453abcb155ada4d98dcc03f8fe05c0cccbad59dc Mon Sep 17 00:00:00 2001 From: techcoderx Date: Sun, 27 Jun 2021 15:15:47 +0800 Subject: [PATCH 22/33] account by key api --- src/http/accountByKey/index.js | 17 +++++++++++++++++ src/mongo.js | 21 +++++++++------------ 2 files changed, 26 insertions(+), 12 deletions(-) create mode 100644 
src/http/accountByKey/index.js diff --git a/src/http/accountByKey/index.js b/src/http/accountByKey/index.js new file mode 100644 index 0000000..2871153 --- /dev/null +++ b/src/http/accountByKey/index.js @@ -0,0 +1,17 @@ +const parallel = require('run-parallel') + +module.exports = { + init: (app) => { + app.get('/accountByKey/:pub', (req,res) => { + let ops = [ + (cb) => db.collection('accounts').find({pub: req.params.pub}).toArray(cb), + (cb) => db.collection('accounts').find({'keys.pub': req.params.pub}).toArray(cb) + ] + parallel(ops,(errors,results) => { + if (errors) + return res.status(500).send(errors) + return res.send(results) + }) + }) + } +} \ No newline at end of file diff --git a/src/mongo.js b/src/mongo.js index 0fadabe..9d65895 100644 --- a/src/mongo.js +++ b/src/mongo.js @@ -132,18 +132,15 @@ var mongo = { }) }, - addMongoIndexes: (cb) => { - db.collection('accounts').createIndex( {name:1}, function() { - db.collection('accounts').createIndex( {balance:1}, function() { - db.collection('accounts').createIndex( {node_appr:1}, function() { - db.collection('contents').createIndex( {ts:1}, function() { - db.collection('contents').createIndex( {author:1}, function() { - cb() - }) - }) - }) - }) - }) + addMongoIndexes: async (cb) => { + await db.collection('accounts').createIndex({name:1}) + await db.collection('accounts').createIndex({balance:1}) + await db.collection('accounts').createIndex({node_appr:1}) + await db.collection('accounts').createIndex({pub:1}) + await db.collection('accounts').createIndex({'keys.pub':1}) + await db.collection('contents').createIndex({ts:1}) + await db.collection('contents').createIndex({author:1}) + cb() }, fillInMemoryBlocks: (cb,headBlock) => { let query = {} From 19e4cba817476f4c15d0bbd5c2845052cf7e9a46 Mon Sep 17 00:00:00 2001 From: techcoderx Date: Sun, 27 Jun 2021 16:08:01 +0800 Subject: [PATCH 23/33] wealth distribution api --- src/http/distribution/index.js | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 src/http/distribution/index.js diff --git a/src/http/distribution/index.js b/src/http/distribution/index.js new file mode 100644 index 0000000..4325b3f --- /dev/null +++ b/src/http/distribution/index.js @@ -0,0 +1,28 @@ +const parallel = require('run-parallel') + +module.exports = { + init: (app) => { + app.get('/distribution',(req,res) => { + let ops = [] + + // 0.01 <= $$$ < 1,000,000 DTUBE + for (let i = 0; i < 8; i++) + ops.push((cb) => db.collection('accounts').aggregate([ + {$match: {balance: {$gte: Math.pow(10,i), $lt: Math.pow(10,i+1)}}}, + {$group: {_id: i, sum: {$sum: "$balance"}, count: {$sum: 1}}} + ]).toArray((e,r) => cb(e,r[0]))) + + // >=1,000,000 DTUBE + ops.push((cb) => db.collection('accounts').aggregate([ + {$match: {balance: {$gte: Math.pow(10,8)}}}, + {$group: {_id: 8, sum: {$sum: "$balance"}, count: {$sum: 1}}} + ]).toArray((e,r) => cb(e,r[0]))) + + parallel(ops,(errors,results) => { + if (errors) + return res.status(500).send(errors) + return res.send(results) + }) + }) + } +} \ No newline at end of file From f81469e385fc8e7b582cc578fc2211b497394750 Mon Sep 17 00:00:00 2001 From: techcoderx Date: Sat, 3 Jul 2021 15:24:03 +0800 Subject: [PATCH 24/33] fix /accountPrice case insensitivity --- src/http/accountPrice/index.js | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/http/accountPrice/index.js b/src/http/accountPrice/index.js index 96985dc..8ded472 100644 --- a/src/http/accountPrice/index.js +++ b/src/http/accountPrice/index.js @@ -6,9 +6,10 @@ 
module.exports = { res.sendStatus(500) return } - db.collection('accounts').findOne({ name: req.params.name }, function (err, account) { + let user = req.params.name.toLowerCase() + db.collection('accounts').findOne({ name: user }, function (err, account) { if (account) res.send('Not Available') - else res.send(String(eco.accountPrice(req.params.name))) + else res.send(String(eco.accountPrice(user))) }) }) } From 6023f872e9b959c8f755e6f8a43ebd1204d1a907 Mon Sep 17 00:00:00 2001 From: techcoderx Date: Mon, 12 Jul 2021 09:40:30 +0800 Subject: [PATCH 25/33] fix distributed name --- src/leaderStats.js | 2 -- src/transactions/claimReward.js | 2 +- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/src/leaderStats.js b/src/leaderStats.js index b3d4564..64b42a9 100644 --- a/src/leaderStats.js +++ b/src/leaderStats.js @@ -1,5 +1,3 @@ -const parallel = require('run-parallel') - // leader indexer from extended api let indexer = { leaders: { diff --git a/src/transactions/claimReward.js b/src/transactions/claimReward.js index cdc51b4..bffc57d 100644 --- a/src/transactions/claimReward.js +++ b/src/transactions/claimReward.js @@ -47,7 +47,7 @@ module.exports = { $inc: {balance: reward} }, function() { cache.insertOne('distributed', { - name: config.masterName, + name: tx.sender, dist: reward, ts: ts, _id: content.author+'/'+content.link+'/claim/'+tx.sender From cca08f09567eeebd20437b316bf37d762c503ade Mon Sep 17 00:00:00 2001 From: techcoderx Date: Thu, 15 Jul 2021 12:42:16 +0800 Subject: [PATCH 26/33] block tx error should be logged accordingly --- src/chain.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/chain.js b/src/chain.js index cde912a..8232a76 100644 --- a/src/chain.js +++ b/src/chain.js @@ -576,7 +576,7 @@ chain = { }) }) else { - logr.debug(error, tx) + logr.error(error, tx) callback(null, false) } }) From 9561341aba262302e5e6117c5c21e2fe72ac8f83 Mon Sep 17 00:00:00 2001 From: techcoderx Date: Thu, 15 Jul 2021 16:22:20 +0800 Subject: [PATCH 27/33] writer queue for chain state db --- src/cache.js | 50 +++++++++++++++++++++++------------------- src/chain.js | 4 ++-- src/main.js | 17 ++++++++------ src/processingQueue.js | 26 ++++++++++++++++++++++ 4 files changed, 66 insertions(+), 31 deletions(-) create mode 100644 src/processingQueue.js diff --git a/src/cache.js b/src/cache.js index f824fa2..5dc6502 100644 --- a/src/cache.js +++ b/src/cache.js @@ -1,5 +1,6 @@ const parallel = require('run-parallel') const cloneDeep = require('clone-deep') +const ProcessingQueue = require('./processingQueue') var cache = { copy: { accounts: {}, @@ -17,6 +18,7 @@ var cache = { }, leaders: {}, leaderChanges: [], + writerQueue: new ProcessingQueue(), rollback: function() { // rolling back changes from copied documents for (const key in cache.copy.accounts) @@ -200,11 +202,16 @@ var cache = { cache.leaderChanges.push([leader,0]) }, clear: function() { - cache.accounts = {} - cache.contents = {} - cache.distributed = {} + cache.changes = [] + cache.inserts = [] + cache.rebuild.changes = [] + cache.rebuild.inserts = [] + cache.leaderChanges = [] + cache.copy.accounts = {} + cache.copy.contents = {} + cache.copy.distributed = {} }, - writeToDisk: function(cb, rebuild) { + writeToDisk: function(rebuild, cb) { // if (cache.inserts.length) logr.debug(cache.inserts.length+' Inserts') let executions = [] // executing the inserts (new comment / new account) @@ -255,23 +262,22 @@ var cache = { executions.push(leaderStatsWriteOps[op]) } - var timeBefore = new Date().getTime() - 
parallel(executions, function(err, results) {
- let execTime = new Date().getTime()-timeBefore
- if (!rebuild && execTime >= config.blockTime/2)
- logr.warn('Slow write execution: ' + executions.length + ' mongo queries took ' + execTime + 'ms')
- else
- logr.debug(executions.length+' mongo queries executed in '+execTime+'ms')
- cache.changes = []
- cache.inserts = []
- cache.rebuild.changes = []
- cache.rebuild.inserts = []
- cache.leaderChanges = []
- cache.copy.accounts = {}
- cache.copy.contents = {}
- cache.copy.distributed = {}
- cb(err, results)
- })
+ if (typeof cb === 'function') {
+ let timeBefore = new Date().getTime()
+ parallel(executions, function(err, results) {
+ let execTime = new Date().getTime()-timeBefore
+ if (!rebuild && execTime >= config.blockTime/2)
+ logr.warn('Slow write execution: ' + executions.length + ' mongo queries took ' + execTime + 'ms')
+ else
+ logr.debug(executions.length+' mongo queries executed in '+execTime+'ms')
+ cache.clear()
+ cb(err, results)
+ })
+ } else {
+ logr.debug(executions.length+' mongo ops queued')
+ cache.writerQueue.push((callback) => parallel(executions,() => callback()))
+ cache.clear()
+ }
 },
 processRebuildOps: (cb,writeToDisk) => {
 for (let i in cache.inserts)
@@ -285,7 +291,7 @@
 cache.copy.contents = {}
 cache.copy.distributed = {}
 if (writeToDisk)
- cache.writeToDisk(cb,true)
+ cache.writeToDisk(true,cb)
 else
 cb()
 },
diff --git a/src/chain.js b/src/chain.js
index 8232a76..8072e28 100644
--- a/src/chain.js
+++ b/src/chain.js
@@ -265,11 +265,11 @@ chain = {
 chain.recentBlocks.push(block)
 chain.minerWorker(block)
 chain.output(block)
- cache.writeToDisk(function() {})
+ cache.writeToDisk(false)
 cb(true)
 } else {
 // if we are recovering we wait for mongo to update
- cache.writeToDisk(function() {
+ cache.writeToDisk(false,function() {
 if (block._id % config.leaders === 0)
 chain.schedule = chain.minerSchedule(block)
 chain.recentBlocks.push(block)
diff --git a/src/main.js b/src/main.js
index 946601f..9d412f4 100644
--- a/src/main.js
+++ b/src/main.js
@@ -88,11 +88,11 @@ function startRebuild(startBlock) {
 logr.info('Rebuilt ' + headBlockNum + ' blocks successfully in ' + (new Date().getTime() - rebuildStartTime) + ' ms')
 logr.info('Writing rebuild data to disk...')
 let cacheWriteStart = new Date().getTime()
- cache.writeToDisk(() => {
+ cache.writeToDisk(true,() => {
 logr.info('Rebuild data written to disk in ' + (new Date().getTime() - cacheWriteStart) + ' ms')
 if (chain.shuttingDown) return process.exit(0)
 startDaemon()
- },true)
+ })
 })
 }
 
@@ -123,9 +123,12 @@ process.on('SIGINT', function() {
 closing = true
 chain.shuttingDown = true
 if (!erroredRebuild && chain.restoredBlocks && chain.getLatestBlock()._id < chain.restoredBlocks) return
- logr.warn('Waiting '+config.blockTime+' ms before shut down...')
- setTimeout(function() {
- logr.info('Avalon exitted safely')
- process.exit(0)
- }, config.blockTime)
+ process.stdout.write('\r')
+ logr.info('Received SIGINT, completing writer queue...')
+ setInterval(() => {
+ if (cache.writerQueue.queue.length === 0) {
+ logr.info('Avalon exited safely')
+ process.exit(0)
+ }
+ },500)
 })
\ No newline at end of file
diff --git a/src/processingQueue.js b/src/processingQueue.js
new file mode 100644
index 0000000..e6c028f
--- /dev/null
+++ b/src/processingQueue.js
@@ -0,0 +1,26 @@
+class ProcessingQueue {
+ constructor() {
+ this.queue = []
+ this.processing = false
+ }
+
+ push(f = (cb) => cb()) {
+ this.queue.push(f)
+ if (!this.processing) {
+ this.processing = true
+ this.execute()
+ }
+ }
+
+ execute() {
+ let first = this.queue.shift()
+ first(() => {
+ if (this.queue.length > 0)
+ this.execute()
+ else
+ this.processing = false
+ })
+ }
+}
+
+module.exports = ProcessingQueue
\ No newline at end of file
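For context, the new ProcessingQueue runs queued tasks strictly one at a time, in push order, each task receiving a callback it must invoke when done. A minimal usage sketch (illustrative only, not part of the patch; the require path assumes the repository root as working directory):

const ProcessingQueue = require('./src/processingQueue')
const queue = new ProcessingQueue()
// both tasks are queued immediately, but the second only starts after the
// first calls done(), so 'batch 1' always logs before 'batch 2' here even
// though its timer is longer — execute() only shifts the next task after
// the previous one calls back
queue.push((done) => setTimeout(() => { console.log('batch 1 flushed'); done() }, 100))
queue.push((done) => setTimeout(() => { console.log('batch 2 flushed'); done() }, 10))
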
From 16bf13a451f3b0be8b7183255ab0f3b84bca3164 Mon Sep 17 00:00:00 2001
From: techcoderx
Date: Sat, 17 Jul 2021 17:12:38 +0800
Subject: [PATCH 28/33] add new rebuild mode without signature verification

---
 src/chain.js | 4 ++++
 src/main.js | 5 +++++
 2 files changed, 9 insertions(+)

diff --git a/src/chain.js b/src/chain.js
index 8072e28..867a7dc 100644
--- a/src/chain.js
+++ b/src/chain.js
@@ -343,7 +343,11 @@ chain = {
 if (err) throw err
 if (!account) {
 cb(false); return
+ } else if (chain.restoredBlocks && chain.getLatestBlock()._id < chain.restoredBlocks && process.env.REBUILD_NO_VERIFY === '1') {
+ // no verify rebuild mode, only use if you trust the contents of blocks.zip
+ return cb(account)
 }
+
 // main key can authorize all transactions
 let allowedPubKeys = [[account.pub, account.pub_weight || 1]]
 let threshold = 1
diff --git a/src/main.js b/src/main.js
index 9d412f4..0c34e34 100644
--- a/src/main.js
+++ b/src/main.js
@@ -48,6 +48,11 @@ mongo.init(async function() {
 // Rebuild chain state if specified. This verifies the integrity of every block and transaction and rebuilds the state.
 let rebuildResumeBlock = parseInt(process.env.REBUILD_RESUME_BLK)
 let isResumingRebuild = !isNaN(rebuildResumeBlock) && rebuildResumeBlock > 0
+
+ // alert when rebuilding without signature verification, only use if you know what you are doing
+ if (process.env.REBUILD_NO_VERIFY === '1' && (process.env.REBUILD_STATE === '1' || process.env.REBUILD_STATE === 1))
+ logr.info('Rebuilding without signature verification. Only use this if you know what you are doing!')
+
 if ((process.env.REBUILD_STATE === '1' || process.env.REBUILD_STATE === 1) && !isResumingRebuild) {
 logr.info('Chain state rebuild requested, unzipping blocks.zip...')
 mongo.restoreBlocks((e)=>{

From 9d51991f73a97b0dda732d8f4d351926418575a4 Mon Sep 17 00:00:00 2001
From: techcoderx
Date: Sun, 18 Jul 2021 10:33:27 +0800
Subject: [PATCH 29/33] improved node sync, refresh when falling too far behind other peers

---
 src/http/recover/index.js | 8 ++++++++
 src/main.js | 2 +-
 src/p2p.js | 31 ++++++++++++++++++++++-------
 3 files changed, 33 insertions(+), 8 deletions(-)
 create mode 100644 src/http/recover/index.js

diff --git a/src/http/recover/index.js b/src/http/recover/index.js
new file mode 100644
index 0000000..61a3e81
--- /dev/null
+++ b/src/http/recover/index.js
@@ -0,0 +1,8 @@
+module.exports = {
+ init: (app) => {
+ app.get('/recover',(req,res) => {
+ p2p.refresh(true)
+ res.send({})
+ })
+ }
+}
\ No newline at end of file
diff --git a/src/main.js b/src/main.js
index 0c34e34..230fbec 100644
--- a/src/main.js
+++ b/src/main.js
@@ -135,5 +135,5 @@ process.on('SIGINT', function() {
 logr.info('Avalon exited safely')
 process.exit(0)
 }
- },500)
+ },1000)
 })
\ No newline at end of file
diff --git a/src/p2p.js b/src/p2p.js
index 2554379..5348a0f 100644
--- a/src/p2p.js
+++ b/src/p2p.js
@@ -37,7 +37,10 @@ var p2p = {
 server.on('connection', ws => p2p.handshake(ws))
 logr.info('Listening websocket p2p port on: ' + p2p_port)
 logr.info('Version:',version)
- setTimeout(function(){p2p.recover()}, replay_interval)
+ setTimeout(() => {
+ p2p.recover()
+ setInterval(() => p2p.refresh(), replay_interval)
+ }, replay_interval)
 if (!process.env.NO_DISCOVERY || process.env.NO_DISCOVERY === '0' || process.env.NO_DISCOVERY === 0) {
 
setInterval(function(){p2p.discoveryWorker()}, discovery_interval) p2p.discoveryWorker(true) @@ -340,7 +343,6 @@ var p2p = { if (Object.keys(p2p.recoveredBlocks).length + p2p.recoveringBlocks.length > max_blocks_buffer) return if (!p2p.recovering) p2p.recovering = chain.getLatestBlock()._id - p2p.recovering++ var peersAhead = [] for (let i = 0; i < p2p.sockets.length; i++) if (p2p.sockets[i].node_status @@ -353,11 +355,26 @@ var p2p = { return } - var champion = peersAhead[Math.floor(Math.random()*peersAhead.length)] - p2p.sendJSON(champion, {t: MessageType.QUERY_BLOCK, d:p2p.recovering}) - p2p.recoveringBlocks.push(p2p.recovering) - - if (p2p.recovering%2) p2p.recover() + let champion = peersAhead[Math.floor(Math.random()*peersAhead.length)] + if (p2p.recovering+1 <= champion.node_status.head_block) { + p2p.recovering++ + p2p.sendJSON(champion, {t: MessageType.QUERY_BLOCK, d:p2p.recovering}) + p2p.recoveringBlocks.push(p2p.recovering) + logr.debug('query block #'+p2p.recovering+' -- head block: '+champion.node_status.head_block) + if (p2p.recovering%2) p2p.recover() + } + }, + refresh: (force = false) => { + if (p2p.recovering && !force) return + for (let i = 0; i < p2p.sockets.length; i++) + if (p2p.sockets[i].node_status + && p2p.sockets[i].node_status.head_block > chain.getLatestBlock()._id + 10 + && p2p.sockets[i].node_status.origin_block === config.originHash) { + logr.info('Catching up with network, head block: ' + p2p.sockets[i].node_status.head_block) + p2p.recovering = chain.getLatestBlock()._id + p2p.recover() + break + } }, errorHandler: (ws) => { ws.on('close', () => p2p.closeConnection(ws)) From eeb622054397bb108b6e9230497d2b95095d38b0 Mon Sep 17 00:00:00 2001 From: techcoderx Date: Sun, 18 Jul 2021 10:41:04 +0800 Subject: [PATCH 30/33] update dependency --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index b737da8..733dccb 100644 --- a/package.json +++ b/package.json @@ -29,7 +29,7 @@ "sharp": "^0.27.2", "signale": "^1.4.0", "simple-youtube-api": "^5.2.1", - "ws": "^7.0.0" + "ws": "^7.5.3" }, "engines": { "node": ">=4.3.2" From e97a57078862fe3824e804aaacb665d0aee42ba4 Mon Sep 17 00:00:00 2001 From: techcoderx Date: Sun, 18 Jul 2021 11:14:36 +0800 Subject: [PATCH 31/33] new option to restore from already unzipped blocks dump --- src/main.js | 4 ++-- src/mongo.js | 27 +++++++++++++++++++++------ 2 files changed, 23 insertions(+), 8 deletions(-) diff --git a/src/main.js b/src/main.js index 230fbec..999255e 100644 --- a/src/main.js +++ b/src/main.js @@ -20,7 +20,7 @@ if (allowNodeV.indexOf(currentNodeV) === -1) { logr.fatal('Wrong NodeJS version. Allowed versions: v'+allowNodeV.join(', v')) process.exit(1) } else if (currentNodeV === 10) { - logr.warn('NodeJS v10 has reached end of life, hence v10 support for Avalon will be removed in the future. Please upgrade to the latest supported NodeJS v' + allowNodeV[allowNodeV.length]) + logr.warn('NodeJS v10 has reached end of life, hence v10 support for Avalon will be removed in the future. Please upgrade to the latest supported NodeJS v' + allowNodeV[allowNodeV.length-1]) } else logr.info('Correctly using NodeJS v'+process.versions.node) erroredRebuild = false @@ -54,7 +54,7 @@ mongo.init(async function() { logr.info('Rebuilding without signature verification. 
Only use this if you know what you are doing!') if ((process.env.REBUILD_STATE === '1' || process.env.REBUILD_STATE === 1) && !isResumingRebuild) { - logr.info('Chain state rebuild requested, unzipping blocks.zip...') + logr.info('Chain state rebuild requested'+(process.env.UNZIP_BLOCKS === '1' ? ', unzipping blocks.zip...' : '')) mongo.restoreBlocks((e)=>{ if (e) return logr.error(e) startRebuild(0) diff --git a/src/mongo.js b/src/mongo.js index 9d65895..44052c6 100644 --- a/src/mongo.js +++ b/src/mongo.js @@ -166,20 +166,35 @@ var mongo = { restoreBlocks: (cb) => { let dump_dir = process.cwd() + '/dump' let dump_location = dump_dir + '/blocks.zip' + let blocks_bson = dump_dir + '/blocks.bson' + let blocks_meta = dump_dir + '/blocks.metadata.json' let mongoUri = db_url+'/'+db_name - try { - fs.statSync(dump_location) - } catch (err) { - return cb('blocks.zip file not found') + if (process.env.UNZIP_BLOCKS === '1') { + try { + fs.statSync(dump_location) + } catch (err) { + return cb('blocks.zip file not found') + } + } else { + try { + fs.statSync(blocks_bson) + fs.statSync(blocks_meta) + } catch { + return cb('blocks mongo dump files not found') + } } // Drop the existing blocks collection and replace with the dump db.collection('blocks').drop((e,ok) => { if (!ok) return cb('Failed to drop existing blocks data') - spawnSync('unzip',[dump_location,'-d',dump_dir]) - logr.info('Finished unzipping, importing blocks now...') + if (process.env.UNZIP_BLOCKS === '1') { + spawnSync('unzip',[dump_location,'-d',dump_dir]) + logr.info('Finished unzipping, importing blocks now...') + } else { + logr.info('Importing blocks for rebuild...') + } let mongorestore = spawn('mongorestore', ['--uri='+mongoUri, '-d', db_name, dump_dir]) mongorestore.stderr.on('data', (data) => { From 06f2c3027b03ca4c474c79e96fb9ec2e06c79193 Mon Sep 17 00:00:00 2001 From: techcoderx Date: Sun, 18 Jul 2021 13:32:22 +0800 Subject: [PATCH 32/33] bump version --- package.json | 2 +- scripts/start.sh | 1 + src/p2p.js | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index 733dccb..d476a8a 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "avalon", - "version": "1.0.0", + "version": "1.4.0", "description": "", "scripts": { "start": "node src/main.js", diff --git a/scripts/start.sh b/scripts/start.sh index fed0342..d6d87a6 100755 --- a/scripts/start.sh +++ b/scripts/start.sh @@ -19,6 +19,7 @@ #export NOTIFICATIONS=1 #export RANKINGS=1 #export CONTENTS=1 +#export LEADER_STATS=1 # Cache warmup option export WARMUP_ACCOUNTS=100000 diff --git a/src/p2p.js b/src/p2p.js index 5348a0f..df254a2 100644 --- a/src/p2p.js +++ b/src/p2p.js @@ -1,4 +1,4 @@ -const version = '1.3.1' +const version = '1.4' const default_port = 6001 const replay_interval = 1500 const discovery_interval = 60000 From 794a61c7f30072cf548d128d20cf5d086e75e056 Mon Sep 17 00:00:00 2001 From: techcoderx Date: Sun, 18 Jul 2021 15:24:46 +0800 Subject: [PATCH 33/33] increased to 15 leaders --- src/config.js | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/config.js b/src/config.js index 182cfa8..debb17a 100644 --- a/src/config.js +++ b/src/config.js @@ -138,13 +138,14 @@ var config = { 4860000: { hotfix1: true }, - 15000000: { + 8500050: { maxKeys: 25, disallowVotingInactiveLeader: true, burnAccount: 'dtube.airdrop', preloadVt: 50, // 50% of vtPerBurn preloadBwGrowth: 2, // x2 more time of bwGrowth - multisig: true + multisig: true, + leaders: 15 } }, read: (blockNum) => {
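A rough sketch of how these numbered override blocks presumably activate (a sketch under stated assumptions, not code from the repository): config.read(blockNum) would merge every override whose activation height is at or below the queried block number over the base values, in ascending order, so leaders becomes 15 from block 8500050 onward.

// rough sketch only — assumes the numbered override blocks shown in the diffs
// sit in a `hardforks` map on a `baseConfig` object; both names are assumptions
const read = (blockNum) => {
    let conf = Object.assign({}, baseConfig)
    Object.keys(baseConfig.hardforks)
        .map(Number)
        .sort((a, b) => a - b)
        .forEach((forkBlock) => {
            // apply each fork's overrides once the chain reaches its height
            if (blockNum >= forkBlock)
                Object.assign(conf, baseConfig.hardforks[forkBlock])
        })
    return conf
}
// under this sketch, read(8500050).leaders === 15,
// while read(8500049) still returns the pre-fork base value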