fix(log): color messages (part 1)

commit 5bdab1b516
parent aa68200bc5
Author: Alexey Kasyanchuk
Date:   2018-08-07 01:43:37 +03:00

8 changed files with 76 additions and 76 deletions
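
Every hunk below makes the same mechanical change: a bare console.log (or, in one place, console.error) call becomes logT(tag, ...args), where the first argument is a short subsystem tag ('feed', 'sql', 'p2p', 'ssh', 'sphinx', 'store', ...) used to color and group messages. The logT helper itself is not part of this diff, so the following is only a minimal sketch of what such a tagged color logger could look like; the color table, the specific ANSI codes, and the global registration are assumptions, not the project's actual code.

// Hypothetical sketch of a tagged color logger - not the implementation
// this commit relies on, which lives outside this diff.
const TAG_COLORS = {
  feed: '\x1b[32m',   // green
  sql: '\x1b[36m',    // cyan
  p2p: '\x1b[33m',    // yellow
  ssh: '\x1b[35m',    // magenta
  sphinx: '\x1b[34m', // blue
  store: '\x1b[31m',  // red
}
const RESET = '\x1b[0m'

// Registered globally (an assumption) so converted call sites can write
// logT('feed', 'saving feed') without importing anything.
global.logT = (tag, ...messages) => {
  const color = TAG_COLORS[tag] || ''
  console.log(`${color}[${tag}]${RESET}`, ...messages)
}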

@@ -19,7 +19,7 @@ module.exports = class Feed {
     if(!this.loaded)
       return // feed not loaded on begining, ignore saving
-    console.log('saving feed')
+    logT('feed', 'saving feed')
     config.feedDate = this.feedDate
     await this.sphinx.query('delete from feed where id > 0')
     let id = 0
@@ -40,12 +40,12 @@
     this._order()
     this.feedDate = config.feedDate
     this.loaded = true
-    console.log('lodead feed')
+    logT('feed', 'lodead feed')
   }
   clear()
   {
-    console.log('clearing feed')
+    logT('feed', 'clearing feed')
     this.feed = []
   }

@@ -3,7 +3,7 @@ module.exports = (sphinx, table, callback, doneCallback, max = 1000, where = '')
   sphinx.query(`SELECT * FROM ${table} WHERE id > ${index} ${where} LIMIT ${max}`, (err, torrents) => {
     const finish = () => {
       if(err)
-        console.log('big table parse error', err)
+        logT('sql', 'big table parse error', err)
       if(doneCallback)
         doneCallback(true)
       done(true)

@@ -167,9 +167,9 @@ const single = (callback) => {
   });
   mysqlSingle._mysql.on('error', (err) => {
-    console.log('db error', err);
+    logT('sql', 'db error', err);
     if(err.code === 'PROTOCOL_CONNECTION_LOST') { // Connection to the MySQL server is usually
-      console.log('restart single sql connection')
+      logT('sql', 'restart single sql connection')
       mysqlSingle._mysql = undefined
       start(); // lost due to either server restart, or a
     } else { // connnection idle timeout (the wait_timeout

@@ -23,7 +23,7 @@ class p2p {
     this.info = {}
     if(!config.peerId)
     {
-      console.log('generate peerId')
+      logT('p2p', 'generate peerId')
       config.peerId = Math.random().toString(36).substring(2, 15) + Math.random().toString(36).substring(2, 15)
     }
     this.peerId = config.peerId;
@@ -44,7 +44,7 @@ class p2p {
     this.tcpServer.on('connection', (socket) => {
       this.tcpServer.getConnections((err,con) => {
-        console.log('server connected', con, 'max', this.tcpServer.maxConnections)
+        logT('p2p', 'server connected', con, 'max', this.tcpServer.maxConnections)
       })
       socket = new JsonSocket(socket);
       this.clients.push(socket)
@@ -108,7 +108,7 @@ class p2p {
     // new peer with peer exchange
     this.on('peer', (peer) => {
-      console.log('got peer exchange', peer)
+      logT('p2p', 'got peer exchange', peer)
       this.add(peer)
     })
@@ -124,7 +124,7 @@ class p2p {
       if (alias >= 1) {
         // nothing
       } else {
-        console.log('ignore local address', iface.address);
+        logT('p2p', 'ignore local address', iface.address);
        this.ignore(iface.address)
       }
       ++alias;
@@ -133,7 +133,7 @@ class p2p {
   }
   listen() {
-    console.log('listen p2p on', config.spiderPort, 'port')
+    logT('p2p', 'listen p2p on', config.spiderPort, 'port')
     this.tcpServer.listen(config.spiderPort, '0.0.0.0');
   }
@@ -145,17 +145,17 @@ class p2p {
     // all ok don't need to start any ssh tunnels
     if(isAvailable)
     {
-      console.log('tcp p2p port is reachable - all ok')
+      logT('ssh', 'tcp p2p port is reachable - all ok')
       return;
     }
     else
     {
-      console.log('tcp p2p port is unreachable - try ssh tunnel')
+      logT('ssh', 'tcp p2p port is unreachable - try ssh tunnel')
     }
     if(!this.encryptor)
     {
-      console.error('something wrong with encryptor')
+      logT('ssh', 'something wrong with encryptor')
       return
     }
@@ -170,7 +170,7 @@ class p2p {
       return
     }
-    console.log('ssh tunnel success, redirect peers to ssh')
+    logT('ssh', 'ssh tunnel success, redirect peers to ssh')
     this.p2pStatus = 1
     this.send('p2pStatus', this.p2pStatus)
@@ -185,7 +185,7 @@ class p2p {
   {
     if(this.ssh)
     {
-      console.log('closing ssh...')
+      logT('ssh', 'closing ssh...')
      this.ssh.kill()
     }
     // close server
@@ -312,7 +312,7 @@ class p2p {
        torrents: data.info ? data.info.torrents || 0 : 0
      })
      this.events.emit('peer', address)
-     console.log('new peer', address)
+     logT('p2p', 'new peer', address)
      // add some other peers
      if(data.peers && data.peers.length > 0)
@@ -338,7 +338,7 @@ class p2p {
      }
      this.peers.splice(index, 1);
-     console.log('close peer connection', address)
+     logT('p2p', 'close peer connection', address)
    }
  })

@@ -136,8 +136,8 @@ const writeSphinxConfig = (path, dbPath) => {
   config = iconv.encode(config, 'win1251')
   fs.writeFileSync(`${path}/sphinx.conf`, config)
-  console.log(`writed sphinx config to ${path}`)
-  console.log('db path:', dbPath)
+  logT('sphinx', `writed sphinx config to ${path}`)
+  logT('sphinx', 'db path:', dbPath)
   return {isInitDb}
 }
@@ -146,7 +146,7 @@ module.exports = (callback, dataDirectory, onClose) => {
   const start = (callback) => {
     const sphinxPath = path.resolve(appPath('searchd'))
-    console.log('Sphinx Path:', sphinxPath)
+    logT('sphinx', 'Sphinx Path:', sphinxPath)
     const sphinxConfigDirectory = dataDirectory
     appConfig['dbPath'] = appConfig.dbPath && appConfig.dbPath.length > 0 ? appConfig.dbPath : sphinxConfigDirectory;
@@ -174,14 +174,14 @@ module.exports = (callback, dataDirectory, onClose) => {
     const optimizeResolvers = {}
     sphinx.stdout.on('data', (data) => {
-      console.log(`sphinx: ${data}`)
+      logT('sphinx', `sphinx: ${data}`)
       // don't listen if we are in fixing mode
       if(sphinx.fixing)
         return
       if (data.includes('accepting connections')) {
-        console.log('catched sphinx start')
+        logT('sphinx', 'catched sphinx start')
         if(callback)
           callback()
       }
@@ -196,14 +196,14 @@ module.exports = (callback, dataDirectory, onClose) => {
       {
         if(optimizeResolvers[checkOptimized[1]])
         {
-          console.log('resolve optimizer', checkOptimized[1])
+          logT('sphinx', 'resolve optimizer', checkOptimized[1])
          optimizeResolvers[checkOptimized[1]]()
         }
       }
     })
     sphinx.on('close', (code, signal) => {
-      console.log(`sphinx closed with code ${code} and signal ${signal}`)
+      logT('sphinx', `sphinx closed with code ${code} and signal ${signal}`)
       if(onClose && !sphinx.replaceOnClose) // sometime we don't want to call default callback
         onClose()
       if(sphinx.onClose)
@@ -211,7 +211,7 @@ module.exports = (callback, dataDirectory, onClose) => {
     })
     sphinx.stop = (onFinish, replaceFinish) => {
-      console.log('sphinx closing...')
+      logT('sphinx', 'sphinx closing...')
       if(onFinish)
         sphinx.onClose = onFinish
       if(replaceFinish)
@@ -234,7 +234,7 @@ module.exports = (callback, dataDirectory, onClose) => {
     // close db
     await new Promise((resolve) => {
       sphinx.stop(resolve, true)
-      console.log('revent start')
+      logT('sphinx', 'revent start')
     })
     const checkNullFile = (file) => new Promise((resolve) => {
@@ -258,7 +258,7 @@ module.exports = (callback, dataDirectory, onClose) => {
     brokenFiles = probablyCoruptedFiles.filter((file, index) => !brokenFiles[index])
     brokenFiles.forEach(file => {
-      console.log('FIXDB: clean file because of broken', file)
+      logT('sphinx', 'FIXDB: clean file because of broken', file)
       fs.unlinkSync(file)
     })

@@ -113,7 +113,7 @@ module.exports = function (send, recive, dataDirectory, version, env)
   if(peers && peers.length > 0)
   {
     peers.forEach(peer => p2p.add(peer))
-    console.log('loaded', peers.length, 'peers')
+    logT('p2p', 'loaded', peers.length, 'peers')
   }
 }
@@ -129,7 +129,7 @@ module.exports = function (send, recive, dataDirectory, version, env)
       resolve(data.length > 0 && JSON.parse(data))
     });
   }).on("error", (err) => {
-    console.log(`${url} error: ` + err.message)
+    logT('http', `${url} error: ` + err.message)
     resolve(false)
   });
 })
@@ -145,7 +145,7 @@ module.exports = function (send, recive, dataDirectory, version, env)
     if(peers && peers.length > 0)
     {
       peers.forEach(peer => p2p.add(peer))
-      console.log('loaded', peers.length, 'peers from bootstrap')
+      logT('p2p', 'loaded', peers.length, 'peers from bootstrap')
     }
   }
   if(json.bootstrapMap)
@@ -165,7 +165,7 @@ module.exports = function (send, recive, dataDirectory, version, env)
         }
       }
     }
-    console.log('loaded peers map from bootstrap')
+    logT('p2p', 'loaded peers map from bootstrap')
   }
 }
@@ -184,7 +184,7 @@ module.exports = function (send, recive, dataDirectory, version, env)
   p2pBootstrapLoop = setInterval(() => {
     if(p2p.size === 0)
     {
-      console.log('load peers from bootstap again because no peers at this moment')
+      logT('p2p', 'load peers from bootstap again because no peers at this moment')
       loadBootstrap()
     }
   }, 90000) // try to load new peers if there is no one found
@@ -282,7 +282,7 @@ module.exports = function (send, recive, dataDirectory, version, env)
 const checkTorrent = (torrent) => {
   if(config.filters.maxFiles > 0 && torrent.files > config.filters.maxFiles)
   {
-    console.log('ignore', torrent.name, 'because files', torrent.files, '>', config.filters.maxFiles)
+    logT('check', 'ignore', torrent.name, 'because files', torrent.files, '>', config.filters.maxFiles)
     return false
   }
@@ -292,37 +292,37 @@ module.exports = function (send, recive, dataDirectory, version, env)
     const rx = new RegExp(nameRX)
     if(!config.filters.namingRegExpNegative && !rx.test(torrent.name))
     {
-      console.log('ignore', torrent.name, 'by naming rx')
+      logT('check', 'ignore', torrent.name, 'by naming rx')
       return false
     }
     else if(config.filters.namingRegExpNegative && rx.test(torrent.name))
     {
-      console.log('ignore', torrent.name, 'by naming rx negative')
+      logT('check', 'ignore', torrent.name, 'by naming rx negative')
       return false
     }
   }
   if(torrent.contentType === 'bad')
   {
-    console.log('ignore torrent', torrent.name, 'because this is a bad thing')
+    logT('check', 'ignore torrent', torrent.name, 'because this is a bad thing')
     return false
   }
   if(config.filters.adultFilter && torrent.contentCategory === 'xxx')
   {
-    console.log('ignore torrent', torrent.name, 'because adult filter')
+    logT('check', 'ignore torrent', torrent.name, 'because adult filter')
     return false
   }
   if(config.filters.sizeEnabled && (torrent.size < config.filters.size.min || torrent.size > config.filters.size.max))
   {
-    console.log('ignore torrent', torrent.name, 'because size bounds of', torrent.size, ':', config.filters.size)
+    logT('check', 'ignore torrent', torrent.name, 'because size bounds of', torrent.size, ':', config.filters.size)
     return false
   }
   if(config.filters.contentType && Array.isArray(config.filters.contentType) && !config.filters.contentType.includes(torrent.contentType))
   {
-    console.log('ignore torrent', torrent.name, 'because type', torrent.contentType, 'not in:', config.filters.contentType)
+    logT('check', 'ignore torrent', torrent.name, 'because type', torrent.contentType, 'not in:', config.filters.contentType)
     return false
   }
@@ -377,7 +377,7 @@ module.exports = function (send, recive, dataDirectory, version, env)
   if(!filesList || filesList.length == 0)
   {
-    console.log('skip torrent', torrent.name, '- no filesList')
+    logT('add', 'skip torrent', torrent.name, '- no filesList')
     resolve()
     return
   }
@@ -423,7 +423,7 @@ module.exports = function (send, recive, dataDirectory, version, env)
   sphinxSingle.query("SELECT id FROM torrents WHERE hash = ?", torrent.hash, (err, single) => {
     if(!single)
     {
-      console.log(err)
+      logT('add', err)
       resolve()
       return
     }
@@ -462,7 +462,7 @@ module.exports = function (send, recive, dataDirectory, version, env)
     }
     else
     {
-      console.log(torrent);
+      logT('add', torrent);
       console.error(err);
     }
     resolve()
@@ -494,7 +494,7 @@ module.exports = function (send, recive, dataDirectory, version, env)
 }
 const insertMetadata = (metadata, infohash, rinfo) => {
-  console.log('finded torrent', metadata.info.name, ' and add to database');
+  logT('spider', 'finded torrent', metadata.info.name, ' and add to database');
   const bufferToString = (buffer) => Buffer.isBuffer(buffer) ? buffer.toString() : buffer
@@ -551,7 +551,7 @@ module.exports = function (send, recive, dataDirectory, version, env)
 {
   disk.check(rootPath, function(err, info) {
     if (err) {
-      console.log(err);
+      logT('quota', err);
     } else {
       const {available, free, total} = info;
@@ -594,7 +594,7 @@ module.exports = function (send, recive, dataDirectory, version, env)
 }
 recive('dropTorrents', (pathTorrents) => {
-  console.log('drop torrents and replicate from original')
+  logT('drop', 'drop torrents and replicate from original')
   const torrents = pathTorrents.map(path => parseTorrent(fs.readFileSync(path)))
   torrents.forEach(torrent => insertMetadata(torrent, torrent.infoHashBuffer, {address: '127.0.0.1', port: 666}))
 })
@@ -610,7 +610,7 @@ module.exports = function (send, recive, dataDirectory, version, env)
   const {address, port} = stunMsg.getAttribute(STUN_ATTR_XOR_MAPPED_ADDRESS).value
   stunServer.close()
-  console.log('p2p stun ignore my address', address)
+  logT('stun', 'p2p stun ignore my address', address)
   p2p.ignore(address)
   // check port avalibility
@@ -632,7 +632,7 @@ module.exports = function (send, recive, dataDirectory, version, env)
     ttl: 0
   }, function(err) {
     if(err)
-      console.log('upnp server dont respond')
+      logT('upnp', 'upnp server dont respond')
   });
   upnp.portMapping({
     public: config.spiderPort,
@@ -642,7 +642,7 @@ module.exports = function (send, recive, dataDirectory, version, env)
     ttl: 0
   }, function(err) {
     if(err)
-      console.log('upnp server dont respond')
+      logT('upnp', 'upnp server dont respond')
   });
   upnp.portMapping({
     public: config.udpTrackersPort,
@@ -652,7 +652,7 @@ module.exports = function (send, recive, dataDirectory, version, env)
     ttl: 0
   }, function(err) {
     if(err)
-      console.log('upnp server dont respond')
+      logT('upnp', 'upnp server dont respond')
   });
 }
@@ -677,7 +677,7 @@ module.exports = function (send, recive, dataDirectory, version, env)
   if(err)
     return
-  console.log('p2p upnp ignore my address', ip)
+  logT('upnp', 'p2p upnp ignore my address', ip)
   p2p.ignore(ip)
 });
 }
@@ -732,20 +732,20 @@ module.exports = function (send, recive, dataDirectory, version, env)
 }
 // load torrents sessions
-console.log('restore downloading sessions')
+logT('downloader', 'restore downloading sessions')
 torrentClient.loadSession(dataDirectory + '/downloads.json')
 this.stop = async (callback) => {
   this.closing = true
-  console.log('spider closing...')
+  logT('close', 'spider closing...')
   if(upnp)
     upnp.ratsUnmap()
-  console.log('closing alternative db interface')
+  logT('close', 'closing alternative db interface')
   await new Promise(resolve => sphinxSingleAlternative.end(resolve))
   // save torrents sessions
-  console.log('save torrents downloads sessions')
+  logT('close', 'save torrents downloads sessions')
   torrentClient.saveSession(dataDirectory + '/downloads.json')
   // save feed
@@ -755,7 +755,7 @@ module.exports = function (send, recive, dataDirectory, version, env)
   if(config.p2pBootstrap && p2pBootstrapLoop)
   {
     clearInterval(p2pBootstrapLoop)
-    console.log('bootstrap loop stoped')
+    logT('close', 'bootstrap loop stoped')
   }
   // safe future peers
@@ -766,7 +766,7 @@ module.exports = function (send, recive, dataDirectory, version, env)
   if(addresses.length > 0)
   {
     fs.writeFileSync(dataDirectory + '/peers.p2p', peersEncripted, 'utf8');
-    console.log('local peers saved')
+    logT('close', 'local peers saved')
   }
   if(config.p2pBootstrap)
@@ -804,7 +804,7 @@ module.exports = function (send, recive, dataDirectory, version, env)
        'Content-Type' : "application/json",
      }
    };
-   console.log('bootstrap peers saved to', host)
+   logT('close', 'bootstrap peers saved to', host)
    const req = http.request(options, resolve);
    req.on('error', resolve)
    req.end(JSON.stringify({
@@ -820,7 +820,7 @@ module.exports = function (send, recive, dataDirectory, version, env)
    }
  }
- console.log('closing p2p...')
+ logT('close', 'closing p2p...')
  // don't listen spider peer appears
  spider.removeAllListeners('peer')
  await p2p.close()
@@ -828,11 +828,11 @@ module.exports = function (send, recive, dataDirectory, version, env)
  // don't listen complete torrent responses
  client.removeAllListeners('complete')
- console.log('closing torrent client')
+ logT('close', 'closing torrent client')
  torrentClient.destroy(() => {
    sphinx.end(() => spider.close(() => {
      sphinxSingle.destroy()
-     console.log('spider closed')
+     logT('close', 'spider closed')
      callback()
    }))
  })

@@ -46,7 +46,7 @@ const startSSH = (port, host, user, password, callback) => {
 }
 ssh.stdout.on('data', (data) => {
-  console.log(`ssh: ${data}`)
+  logT('ssh', `ssh: ${data}`)
   checkMessage(data)
   if(data.includes('Store key in cache?'))
   {
@@ -56,7 +56,7 @@ const startSSH = (port, host, user, password, callback) => {
 })
 ssh.stderr.on('data', (data) => {
-  console.log(`ssh error: ${data}`);
+  logT('ssh', `ssh error: ${data}`);
   checkMessage(data)
   if(data.includes('Password authentication failed'))
   {
@@ -70,7 +70,7 @@ const startSSH = (port, host, user, password, callback) => {
 });
 ssh.on('close', (code, signal) => {
-  console.log(`ssh closed with code ${code} and signal ${signal}`)
+  logT('ssh', `ssh closed with code ${code} and signal ${signal}`)
   if(callback)
     callback(false)
 })

@@ -13,7 +13,7 @@ module.exports = class P2PStore extends EventEmitter {
   });
   this.synchronized = false
-  console.log('connect p2p store...')
+  logT('store', 'connect p2p store...')
   this.p2p = p2p
   this.sphinx = sphinx
@@ -24,7 +24,7 @@ module.exports = class P2PStore extends EventEmitter {
     if(rows[0] && rows[0].mx >= 1)
       this.id = rows[0].mx;
-    console.log('store db index', this.id)
+    logT('store', 'store db index', this.id)
     this.p2p.events.on('peer', (peer) => {
       if(peer.info && peer.info.store)
@@ -40,7 +40,7 @@ module.exports = class P2PStore extends EventEmitter {
   this.p2p.on('dbStore', (record) => {
     if(!record || record.id - 1 !== this.id)
     {
-      console.log('out of range peerdb store', record.id)
+      logT('store', 'out of range peerdb store', record.id)
       return
     }
@@ -51,7 +51,7 @@ module.exports = class P2PStore extends EventEmitter {
   })
   this.p2p.on('dbSync', ({id} = {}, callback) => {
-    console.log('ask to sync db from', id, 'version')
+    logT('store', 'ask to sync db from', id, 'version')
     if(typeof id === 'undefined' || id >= this.id)
    {
      callback(false)
@@ -62,7 +62,7 @@ module.exports = class P2PStore extends EventEmitter {
    this.sphinx.query(`select * from store where id > ${id}`, (err, records) => {
      if(err)
      {
-       console.log(err)
+       logT('store', err)
        return
      }
@@ -77,7 +77,7 @@ module.exports = class P2PStore extends EventEmitter {
 sync(peer)
 {
-  console.log('sync db on version', this.id, peer ? `from peer ${peer.peerId}` : '')
+  logT('store', 'sync db on version', this.id, peer ? `from peer ${peer.peerId}` : '')
   const processSync = (data, nil, peer) => {
     if(!data || !data.records)
       return
@@ -90,7 +90,7 @@ module.exports = class P2PStore extends EventEmitter {
       && oldIndex < this.id // last sync update of store must be successful, otherwise no point to try sync db from this peer
       && this.id < data.index)
     {
-      console.log('continue sync store from', this.id, 'index', 'peer', peer.peerId)
+      logT('store', 'continue sync store from', this.id, 'index', 'peer', peer.peerId)
       peer.emit('dbSync', {id: this.id}, processSync)
     }
   }
@@ -119,7 +119,7 @@ module.exports = class P2PStore extends EventEmitter {
   // check hash
   if(objectHash(record.data) !== record.hash)
   {
-    console.log('wrong hash for sync peerdb')
+    logT('store', 'wrong hash for sync peerdb')
     return
   }
@@ -127,7 +127,7 @@ module.exports = class P2PStore extends EventEmitter {
   record.myself = false
   // push to db
-  console.log('sync peerdb record', record.id)
+  logT('store', 'sync peerdb record', record.id)
   this._pushToDb(record)
   this.id = record.id
@@ -145,7 +145,7 @@ module.exports = class P2PStore extends EventEmitter {
   (err) => {
     if(err)
     {
-      console.log(err)
+      logT('store', err)
      return
     }
@@ -159,7 +159,7 @@ module.exports = class P2PStore extends EventEmitter {
 {
   if(!this.synchronized)
   {
-    console.log('cant store item on unsync db')
+    logT('store', 'cant store item on unsync db')
     return false
   }
@@ -177,7 +177,7 @@ module.exports = class P2PStore extends EventEmitter {
     temp
   }
-  console.log('store object', value.id)
+  logT('store', 'store object', value.id)
   this._pushToDb(value, () => {
     // store record