Merge pull request #73 from dtube/hf4
HF4 "Harry Potter"
techcoderx authored Jul 23, 2021
2 parents a753eeb + 794a61c commit 2d9cfcb
Showing 30 changed files with 1,066 additions and 458 deletions.
package.json: 4 changes (2 additions & 2 deletions)
@@ -1,6 +1,6 @@
 {
   "name": "avalon",
-  "version": "1.0.0",
+  "version": "1.4.0",
   "description": "",
   "scripts": {
     "start": "node src/main.js",
@@ -29,7 +29,7 @@
     "sharp": "^0.27.2",
     "signale": "^1.4.0",
     "simple-youtube-api": "^5.2.1",
-    "ws": "^7.0.0"
+    "ws": "^7.5.3"
   },
   "engines": {
     "node": ">=4.3.2"
scripts/start.sh: 1 change (1 addition & 0 deletions)
@@ -19,6 +19,7 @@
 #export NOTIFICATIONS=1
 #export RANKINGS=1
 #export CONTENTS=1
+#export LEADER_STATS=1

 # Cache warmup option
 export WARMUP_ACCOUNTS=100000
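
Note: like the other optional module switches above it, the new LEADER_STATS flag ships commented out, so the feature is opt-in. It pairs with the gate added in src/cache.js below, which only collects and writes leader statistics when the node is started with LEADER_STATS=1.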
src/cache.js: 97 changes (51 additions & 46 deletions)
@@ -1,5 +1,6 @@
 const parallel = require('run-parallel')
 const cloneDeep = require('clone-deep')
+const ProcessingQueue = require('./processingQueue')
 var cache = {
     copy: {
         accounts: {},
@@ -17,6 +18,7 @@ var cache = {
     },
     leaders: {},
     leaderChanges: [],
+    writerQueue: new ProcessingQueue(),
     rollback: function() {
         // rolling back changes from copied documents
         for (const key in cache.copy.accounts)
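
Note: src/processingQueue.js is among the 30 changed files but is not rendered on this page. Judging from its use in writeToDisk below, where tasks of the shape (callback) => ... are pushed onto cache.writerQueue, it is presumably a FIFO that runs one async task at a time. A minimal sketch under that assumption (not the actual dTube source):

// processingQueue.js - hypothetical sketch, assuming a serial FIFO.
// Each task receives a `done` callback; the next task starts only
// after the current task calls it, so queued write batches cannot
// overlap on the database.
class ProcessingQueue {
    constructor() {
        this.queue = []
        this.processing = false
    }
    push(task) {
        this.queue.push(task)
        if (!this.processing)
            this.processNext()
    }
    processNext() {
        let task = this.queue.shift()
        if (!task) {
            this.processing = false
            return
        }
        this.processing = true
        task(() => this.processNext())
    }
}

module.exports = ProcessingQueue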
@@ -200,11 +202,16 @@
         cache.leaderChanges.push([leader,0])
     },
     clear: function() {
-        cache.accounts = {}
-        cache.contents = {}
-        cache.distributed = {}
+        cache.changes = []
+        cache.inserts = []
+        cache.rebuild.changes = []
+        cache.rebuild.inserts = []
+        cache.leaderChanges = []
+        cache.copy.accounts = {}
+        cache.copy.contents = {}
+        cache.copy.distributed = {}
     },
-    writeToDisk: function(cb, rebuild) {
+    writeToDisk: function(rebuild, cb) {
         // if (cache.inserts.length) logr.debug(cache.inserts.length+' Inserts')
         let executions = []
         // executing the inserts (new comment / new account)
@@ -248,33 +255,29 @@
             })
         })

-        // no operation compression (dumb and slow)
-        // for (let i = 0; i < cache.changes.length; i++) {
-        //     executions.push(function(callback) {
-        //         var change = cache.changes[i]
-        //         db.collection(change.collection).updateOne(change.query, change.changes, function() {
-        //             callback()
-        //         })
-        //     })
-        // }
+        // leader stats
+        if (process.env.LEADER_STATS === '1') {
+            let leaderStatsWriteOps = leaderStats.getWriteOps()
+            for (let op in leaderStatsWriteOps)
+                executions.push(leaderStatsWriteOps[op])
+        }

-        var timeBefore = new Date().getTime()
-        parallel(executions, function(err, results) {
-            let execTime = new Date().getTime()-timeBefore
-            if (!rebuild && execTime >= config.blockTime/2)
-                logr.warn('Slow write execution: ' + executions.length + ' mongo queries took ' + execTime + 'ms')
-            else
-                logr.debug(executions.length+' mongo queries executed in '+execTime+'ms')
-            cache.changes = []
-            cache.inserts = []
-            cache.rebuild.changes = []
-            cache.rebuild.inserts = []
-            cache.leaderChanges = []
-            cache.copy.accounts = {}
-            cache.copy.contents = {}
-            cache.copy.distributed = {}
-            cb(err, results)
-        })
+        if (typeof cb === 'function') {
+            let timeBefore = new Date().getTime()
+            parallel(executions, function(err, results) {
+                let execTime = new Date().getTime()-timeBefore
+                if (!rebuild && execTime >= config.blockTime/2)
+                    logr.warn('Slow write execution: ' + executions.length + ' mongo queries took ' + execTime + 'ms')
+                else
+                    logr.debug(executions.length+' mongo queries executed in '+execTime+'ms')
+                cache.clear()
+                cb(err, results)
+            })
+        } else {
+            logr.debug(executions.length+' mongo ops queued')
+            cache.writerQueue.push((callback) => parallel(executions,() => callback()))
+            cache.clear()
+        }
     },
     processRebuildOps: (cb,writeToDisk) => {
         for (let i in cache.inserts)
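
Note: the writeToDisk signature swap from (cb, rebuild) to (rebuild, cb) makes the callback optional. With a callback it behaves as before, running all queued mongo operations in parallel and reporting timing; without one it pushes the batch onto writerQueue and returns immediately, relying on the FIFO to keep batches ordered. In both paths the pending-state reset that used to be inlined is now the shared cache.clear(). A hedged usage sketch (these call sites are illustrative, not part of this diff):

// rebuild path: block until every mongo write has landed
cache.writeToDisk(true, (err) => {
    if (err) throw err
    // safe to process the next batch here
})

// live path: no callback, so the batch is queued on cache.writerQueue
// and written in the background without delaying block processing
cache.writeToDisk(false)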
@@ -288,7 +291,7 @@
         cache.copy.contents = {}
         cache.copy.distributed = {}
         if (writeToDisk)
-            cache.writeToDisk(cb,true)
+            cache.writeToDisk(true,cb)
         else
             cb()
     },
@@ -301,11 +304,10 @@
             return '_id'
         }
     },
-    warmup: function(collection, maxDoc, cb) {
-        if (!collection || !maxDoc || maxDoc === 0) {
-            cb(null)
-            return
-        }
+    warmup: (collection, maxDoc) => new Promise((rs,rj) => {
+        if (!collection || !maxDoc || maxDoc === 0)
+            return rs(null)
+
         switch (collection) {
             case 'accounts':
                 db.collection(collection).find({}, {
@@ -315,7 +317,7 @@
                     if (err) throw err
                     for (let i = 0; i < accounts.length; i++)
                         cache[collection][accounts[i].name] = accounts[i]
-                    cb(null)
+                    rs(null)
                 })
                 break

@@ -327,28 +329,31 @@
                     if (err) throw err
                     for (let i = 0; i < contents.length; i++)
                         cache[collection][contents[i]._id] = contents[i]
-                    cb(null)
+                    rs(null)
                 })
                 break

             default:
-                cb('Collection type not found')
+                rj('Collection type not found')
                 break
         }
-    },
-    warmupLeaders: (cb) => {
-        db.collection('accounts').find(
-            {pub_leader: {$exists:true}}
-        ).toArray((e,accs) => {
+    }),
+    warmupLeaders: () => new Promise((rs) => {
+        db.collection('accounts').find({
+            $and: [
+                {pub_leader: {$exists:true}},
+                {pub_leader: {$ne: ""}}
+            ]
+        }).toArray((e,accs) => {
             if (e) throw e
             for (let i in accs) {
                 cache.leaders[accs[i].name] = 1
                 if (!cache.accounts[accs[i].name])
                     cache.accounts[accs[i].name] = accs[i]
             }
-            cb(accs.length)
+            rs(accs.length)
         })
-    }
+    })
 }

 module.exports = cache
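
Note: warmup and warmupLeaders now return Promises instead of taking callbacks, and warmupLeaders additionally skips accounts whose pub_leader key is an empty string (leaders who have unset their signing key). The updated callers live in files not rendered on this page; a hedged sketch of the new call style:

// startup sketch (illustrative; the real caller is not shown in this diff)
async function warmupCache() {
    // preload up to WARMUP_ACCOUNTS account documents into the cache
    await cache.warmup('accounts', parseInt(process.env.WARMUP_ACCOUNTS))
    // preload every account with a non-empty pub_leader key
    let leaderCount = await cache.warmupLeaders()
    console.log(leaderCount + ' leader accounts cached')
}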
(Diffs for the remaining 27 changed files are not rendered on this page.)
