From 02bc92fc1168c0ff9020a816de652b4939349a50 Mon Sep 17 00:00:00 2001 From: Alexey Kasyanchuk Date: Thu, 2 Aug 2018 17:56:38 +0300 Subject: [PATCH 01/25] electron update --- package-lock.json | 12 ++++++------ package.json | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/package-lock.json b/package-lock.json index 4640280..0fc30b3 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1684,9 +1684,9 @@ "dev": true }, "@types/node": { - "version": "8.10.22", - "resolved": "https://registry.npmjs.org/@types/node/-/node-8.10.22.tgz", - "integrity": "sha512-HCJ1dUJEQVFRekwBAlyv9pJ+2rzxq9uimSmsK2q7YDYMbXR3b4BXcO9rsN+36ZBwSWQ5BNh5o8xdZijDSonS5A==", + "version": "8.10.23", + "resolved": "https://registry.npmjs.org/@types/node/-/node-8.10.23.tgz", + "integrity": "sha512-aEp5ZTLr4mYhR9S85cJ+sEYkcsgFY10N1Si5m49iTAVzanZXOwp/pgw6ibFLKXxpflqm71aSWZCRtnTXXO56gA==", "dev": true }, "@webassemblyjs/ast": { @@ -6665,9 +6665,9 @@ "dev": true }, "electron": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/electron/-/electron-2.0.5.tgz", - "integrity": "sha512-NbWsgAvcxxQrDNaLA2L5adZTKWO6mZwC57uSPQiZiFjpO0K6uVNCjFyRbLnhq8AWq2tmcuzs6mFpIzQXmvlnUQ==", + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/electron/-/electron-2.0.6.tgz", + "integrity": "sha512-1UHBWHF2EMjjVyTvcdcUBmISnoxElY4cUgkFVslw5pM1HxTVzi2vev+8NBohdLLFGbIbPyNua5vcBg+bxo1rqw==", "dev": true, "requires": { "@types/node": "^8.0.24", diff --git a/package.json b/package.json index 5f79cb1..21e4fe0 100644 --- a/package.json +++ b/package.json @@ -153,7 +153,7 @@ "babel-plugin-transform-object-rest-spread": "^7.0.0-beta.3", "chai": "^4.1.2", "css-loader": "^0.28.11", - "electron": "2.0.5", + "electron": "2.0.6", "electron-builder": "20.14.7", "eslint": "^4.19.1", "eslint-plugin-react": "^7.9.1", From 2631662168f76fca2299edf074a7192f8e85403d Mon Sep 17 00:00:00 2001 From: Alexey Kasyanchuk Date: Thu, 2 Aug 2018 18:22:05 +0300 Subject: [PATCH 02/25] fix(linux): fix closing on linux --- src/background/background.js | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/src/background/background.js b/src/background/background.js index 6c6c85c..6787025 100644 --- a/src/background/background.js +++ b/src/background/background.js @@ -317,4 +317,20 @@ app.on('before-quit', () => { app.isQuiting = true if (sphinx) stop() -}) \ No newline at end of file +}) + +var rl = require("readline").createInterface({ + input: process.stdin, + output: process.stdout +}); + +rl.on("SIGINT", function () { + process.emit("SIGINT"); +}); + +process.on("SIGINT", () => { + if (sphinx) + stop() + else + app.quit() +}); From eb5d7d1594c7ac8bc99e931fb3bc72954649a1a2 Mon Sep 17 00:00:00 2001 From: Alexey Kasyanchuk Date: Thu, 2 Aug 2018 18:32:49 +0300 Subject: [PATCH 03/25] fix(linux): fix console control after exit --- src/background/background.js | 3 +++ src/background/server.js | 1 + 2 files changed, 4 insertions(+) diff --git a/src/background/background.js b/src/background/background.js index 6787025..28596fe 100644 --- a/src/background/background.js +++ b/src/background/background.js @@ -314,6 +314,9 @@ app.on("window-all-closed", () => { }); app.on('before-quit', () => { + if(rl) + rl.close() + app.isQuiting = true if (sphinx) stop() diff --git a/src/background/server.js b/src/background/server.js index 0935e87..ff66247 100644 --- a/src/background/server.js +++ b/src/background/server.js @@ -63,6 +63,7 @@ rl.on("SIGINT", function () { }); process.on("SIGINT", () => { + rl.close() if(spider) { 
spider.stop(() => sphinx.stop(() => process.exit())) From 28c07fc68939e012247de4ce69065ab72324ccb5 Mon Sep 17 00:00:00 2001 From: Alexey Kasyanchuk Date: Thu, 2 Aug 2018 22:22:52 +0300 Subject: [PATCH 04/25] fix(scanner): fix enconding names in some cases #55 --- src/background/spider.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/background/spider.js b/src/background/spider.js index 332e2c7..f14e6ab 100644 --- a/src/background/spider.js +++ b/src/background/spider.js @@ -598,19 +598,19 @@ app.get('*', function(req, res) for(let i = 0; i < metadata.info.files.length; i++) { let file = metadata.info.files[i]; - let filePath = bufferToString(file.path).join('/'); + let filePath = bufferToString(file['path.utf-8'] || file.path).join('/'); filesAdd(filePath, file.length); size += file.length; } } else { - filesAdd(bufferToString(metadata.info.name), size) + filesAdd(bufferToString(metadata.info['name.utf-8'] || metadata.info.name), size) } const torrentQ = { hash: hash, - name: bufferToString(metadata.info.name), + name: bufferToString(metadata.info['name.utf-8'] || metadata.info.name), size: size, files: filesCount, piecelength: metadata.info['piece length'], From 0f450d3133b538b072a899b5396343dfff375c69 Mon Sep 17 00:00:00 2001 From: Alexey Kasyanchuk Date: Fri, 3 Aug 2018 15:23:25 +0300 Subject: [PATCH 05/25] fix(background): one closing pattern --- src/background/background.js | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/src/background/background.js b/src/background/background.js index 28596fe..fd0d124 100644 --- a/src/background/background.js +++ b/src/background/background.js @@ -300,17 +300,18 @@ const stop = () => { { spider.stop(() => sphinx.stop()) } - else + else if(sphinx) { sphinx.stop() } + else + { + app.quit() + } } app.on("window-all-closed", () => { - if (sphinx) - stop() - else - app.quit() + stop() }); app.on('before-quit', () => { @@ -318,8 +319,7 @@ app.on('before-quit', () => { rl.close() app.isQuiting = true - if (sphinx) - stop() + stop() }) var rl = require("readline").createInterface({ @@ -332,8 +332,5 @@ rl.on("SIGINT", function () { }); process.on("SIGINT", () => { - if (sphinx) - stop() - else - app.quit() + stop() }); From 9d5667eb0045f7a0242631dfbd52ba2fdc431290 Mon Sep 17 00:00:00 2001 From: Alexey Kasyanchuk Date: Fri, 3 Aug 2018 15:55:21 +0300 Subject: [PATCH 06/25] fix(closing): window can be closing on event --- src/background/background.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/background/background.js b/src/background/background.js index fd0d124..a6c567b 100644 --- a/src/background/background.js +++ b/src/background/background.js @@ -266,7 +266,8 @@ app.on("ready", () => { { const id = arg[arg.length - 1].callback arg[arg.length - 1] = (responce) => { - mainWindow.webContents.send('callback', id, responce) + if(mainWindow) + mainWindow.webContents.send('callback', id, responce) } } callback.apply(null, arg) From 585b68338052ed7f7913779e949617a9d4dab6c2 Mon Sep 17 00:00:00 2001 From: Alexey Kasyanchuk Date: Sun, 5 Aug 2018 00:38:11 +0300 Subject: [PATCH 07/25] feat(closing): fast window closing/hiding --- src/background/background.js | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/background/background.js b/src/background/background.js index a6c567b..297d69a 100644 --- a/src/background/background.js +++ b/src/background/background.js @@ -201,10 +201,7 @@ app.on("ready", () => { } }, { label: 'Quit', click: function(){ 
app.isQuiting = true; - if (sphinx) - stop() - else - app.quit() + stop() } } ]); @@ -294,6 +291,10 @@ const stop = () => { return stopProtect = true + // hide on case of long exit, to prevent user clicks + if(mainWindow) + mainWindow.hide() + if(tray) tray.destroy() From 45af9bddeaf62a6d804879eedddd3ad1fd9e15b9 Mon Sep 17 00:00:00 2001 From: Alexey Kasyanchuk Date: Sun, 5 Aug 2018 01:43:22 +0300 Subject: [PATCH 08/25] fix(macos): fix crashes under Mac OS X --- src/background/background.js | 10 ++++++++-- src/background/config.js | 2 +- src/background/feed.js | 2 +- 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/src/background/background.js b/src/background/background.js index 297d69a..347d797 100644 --- a/src/background/background.js +++ b/src/background/background.js @@ -295,8 +295,14 @@ const stop = () => { if(mainWindow) mainWindow.hide() - if(tray) - tray.destroy() + // bug with mac os tray closing + // https://github.com/electron/electron/issues/9982 + // https://github.com/electron/electron/issues/13556 + if(process.platform !== 'darwin') + { + if(tray) + tray.destroy() + } if(spider) { diff --git a/src/background/config.js b/src/background/config.js index 947c2b4..038898c 100644 --- a/src/background/config.js +++ b/src/background/config.js @@ -28,7 +28,7 @@ let config = { sphinx: { host : '127.0.0.1', port : 9306, - connectionLimit: 12 + connectionLimit: 10 }, spider: { diff --git a/src/background/feed.js b/src/background/feed.js index d5d674b..4ed6e5e 100644 --- a/src/background/feed.js +++ b/src/background/feed.js @@ -23,7 +23,7 @@ module.exports = class Feed { config.feedDate = this.feedDate await this.sphinx.query('delete from feed where id > 0') let id = 0 - return Promise.all( + await Promise.all( this.feed.map( async record => await this.sphinx.query('insert into feed(id, data) values(?, ?)', [++id, JSON.stringify(record)]) ) From e77775794fda049c88251092b782bcf3bb6ca404 Mon Sep 17 00:00:00 2001 From: Alexey Kasyanchuk Date: Sun, 5 Aug 2018 23:48:31 +0300 Subject: [PATCH 09/25] perf(replication): replication thread optimization --- src/background/api.js | 11 ++- src/background/mysql.js | 100 ++++++++++++++------------ src/background/spider.js | 148 ++++++++------------------------------- 3 files changed, 90 insertions(+), 169 deletions(-) diff --git a/src/background/api.js b/src/background/api.js index f47086d..f7b0ce3 100644 --- a/src/background/api.js +++ b/src/background/api.js @@ -7,6 +7,7 @@ const asyncForEach = require('./asyncForEach') module.exports = async ({ sphinx, + sphinxSingle, send, recive, p2p, @@ -207,8 +208,12 @@ module.exports = async ({ p2p.on('randomTorrents', (nil, callback) => { if(typeof callback != 'function') return; - - sphinx.query('SELECT * FROM `torrents` ORDER BY rand() limit 5', (error, torrents) => { + + // ignore sql requests on closing + if(sphinxSingle.state === 'disconnected') + return + + sphinxSingle.query('SELECT * FROM `torrents` ORDER BY rand() limit 5', (error, torrents) => { if(!torrents || torrents.length == 0) { callback(undefined) return; @@ -222,7 +227,7 @@ module.exports = async ({ } const inSql = Object.keys(hashes).map(hash => sphinx.escape(hash)).join(','); - sphinx.query(`SELECT * FROM files WHERE hash IN(${inSql}) limit 50000`, (error, files) => { + sphinxSingle.query(`SELECT * FROM files WHERE hash IN(${inSql}) limit 50000`, (error, files) => { if(!files) { files = [] diff --git a/src/background/mysql.js b/src/background/mysql.js index 40db40c..0cd9b64 100644 --- a/src/background/mysql.js +++ 
b/src/background/mysql.js @@ -122,57 +122,65 @@ const pool = () => { return expand(sphinx) } -let mysqlSingle = { - _mysql: null -}; -const proxySingle = new Proxy(mysqlSingle, { - get(target, prop) { - if(!target[prop]) - { - let ret = target._mysql[prop] - if(typeof ret === 'function') - ret = ret.bind(target._mysql) - return ret - } - return target[prop] - } -}) const single = (callback) => { - mysqlSingle._mysql = mysql.createConnection({ - host : config.sphinx.host, - port : config.sphinx.port - }); + let mysqlSingle = { + _mysql: null + }; - let promiseResolve; - const connectionPromise = new Promise((resolve) => { - promiseResolve = resolve + const proxySingle = new Proxy(mysqlSingle, { + get(target, prop) { + if(!target[prop]) + { + let ret = target._mysql[prop] + if(typeof ret === 'function') + ret = ret.bind(target._mysql) + return ret + } + return target[prop] + } }) - mysqlSingle.waitConnection = () => connectionPromise; - - mysqlSingle._mysql.connect((mysqlError) => { - if (mysqlError) { - console.error('error connecting: ' + mysqlError.stack); - return; - } - - if(callback) - callback(proxySingle) - promiseResolve(proxySingle) - }); - - mysqlSingle._mysql.on('error', (err) => { - console.log('db error', err); - if(err.code === 'PROTOCOL_CONNECTION_LOST') { // Connection to the MySQL server is usually - mysqlSingle._mysql = undefined - single(); // lost due to either server restart, or a - } else { // connnection idle timeout (the wait_timeout - throw err; // server variable configures this) - } - }); + const start = () => + { + mysqlSingle._mysql = mysql.createConnection({ + host : config.sphinx.host, + port : config.sphinx.port + }); - mysqlSingle._mysql = expand(mysqlSingle._mysql) - return proxySingle + let promiseResolve; + const connectionPromise = new Promise((resolve) => { + promiseResolve = resolve + }) + mysqlSingle.waitConnection = () => connectionPromise; + + mysqlSingle._mysql.connect((mysqlError) => { + if (mysqlError) { + console.error('error connecting: ' + mysqlError.stack); + return; + } + + if(callback) + callback(proxySingle) + + promiseResolve(proxySingle) + }); + + mysqlSingle._mysql.on('error', (err) => { + console.log('db error', err); + if(err.code === 'PROTOCOL_CONNECTION_LOST') { // Connection to the MySQL server is usually + console.log('restart single sql connection') + mysqlSingle._mysql = undefined + start(); // lost due to either server restart, or a + } else { // connnection idle timeout (the wait_timeout + throw err; // server variable configures this) + } + }); + + mysqlSingle._mysql = expand(mysqlSingle._mysql) + return proxySingle + } + + return start() } module.exports = {pool, single} \ No newline at end of file diff --git a/src/background/spider.js b/src/background/spider.js index f14e6ab..997e5fc 100644 --- a/src/background/spider.js +++ b/src/background/spider.js @@ -73,114 +73,17 @@ module.exports = function (send, recive, dataDirectory, version, env) } ] - let mysqlSingle = single((mysqlSingle) => { - mysqlSingle.query("SELECT MAX(`id`) as mx from torrents", (err, rows) => { - if(err) - return - - if(rows[0] && rows[0].mx >= 1) - torrentsId = rows[0].mx + 1; - }) - - mysqlSingle.query("SELECT COUNT(*) as cnt from torrents", (err, rows) => { - if(err) - return - - p2p.info.torrents = rows[0].cnt - }) - - mysqlSingle.query("SELECT MAX(`id`) as mx from files", (err, rows) => { - if(err) - return - - if(rows[0] &&rows[0].mx >= 1) - filesId = rows[0].mx + 1; - }) - - mysqlSingle.query("SELECT COUNT(*) as cnt from files", (err, rows) => { 
- if(err) - return - - p2p.info.files = rows[0].cnt - }) - }); - - /* -app.use(express.static('build', {index: false})); - -app.get('/sitemap.xml', function(req, res) { - sphinx.query('SELECT count(*) as cnt FROM `torrents` WHERE contentCategory != \'xxx\' OR contentCategory IS NULL', function (error, rows, fields) { - if(!rows) { - return; - } - let urls = [] - for(let i = 0; i < Math.ceil(rows[0].cnt / config.sitemapMaxSize); i++) - urls.push(`http://${config.domain}/sitemap${i+1}.xml`); - - res.header('Content-Type', 'application/xml'); - res.send( sm.buildSitemapIndex({ - urls - })); - }); -}); - -app.get('/sitemap:id.xml', function(req, res) { - if(req.params.id < 1) - return; - - let page = (req.params.id - 1) * config.sitemapMaxSize - - sphinx.query('SELECT hash FROM `torrents` WHERE contentCategory != \'xxx\' OR contentCategory IS NULL LIMIT ?, ?', [page, config.sitemapMaxSize], function (error, rows, fields) { - if(!rows) { - return; - } - let sitemap = sm.createSitemap ({ - hostname: 'http://' + config.domain, - cacheTime: 600000 - }); - sitemap.add({url: '/'}); - for(let i = 0; i < rows.length; i++) - sitemap.add({url: '/torrent/' + rows[i].hash}); - - sitemap.toXML( function (err, xml) { - if (err) { - return res.status(500).end(); - } - res.header('Content-Type', 'application/xml'); - res.send( xml ); - }); - }); -}); - - -app.get('*', function(req, res) -{ - if(typeof req.query['_escaped_fragment_'] != 'undefined') - { - let program = phantomjs.exec('phantom.js', 'http://' + config.domain + req.path) - let body = ''; - let timeout = setTimeout(() => { - program.kill(); - }, 45000) - program.stderr.pipe(process.stderr) - program.stdout.on('data', (chunk) => { - body += chunk; - }); - program.on('exit', code => { - clearTimeout(timeout); - res.header('Content-Type', 'text/html'); - res.send( body ); - }) - - return; - } - - res.sendfile(__dirname + '/build/index.html'); -}); -*/ + const sphinxSingle = await single().waitConnection() + torrentsId = (await sphinxSingle.query("SELECT MAX(`id`) as mx from torrents"))[0] + torrentsId = ((torrentsId && torrentsId.mx) || 0) + 1 + filesId = (await sphinxSingle.query("SELECT MAX(`id`) as mx from files"))[0] + filesId = ((filesId && filesId.mx) || 0) + 1 + p2p.info.torrents = (await sphinxSingle.query("SELECT COUNT(*) as cnt from torrents"))[0].cnt + p2p.info.files = (await sphinxSingle.query("SELECT COUNT(*) as cnt from files"))[0].cnt + const sphinxSingleAlternative = await single().waitConnection() + // start - function baseRowData(row) { return { @@ -289,7 +192,7 @@ app.get('*', function(req, res) const updateTorrentTrackers = (hash) => { let maxSeeders = 0, maxLeechers = 0, maxCompleted = 0; - mysqlSingle.query('UPDATE torrents SET trackersChecked = ? WHERE hash = ?', [Math.floor(Date.now() / 1000), hash], (err, result) => { + sphinxSingle.query('UPDATE torrents SET trackersChecked = ? WHERE hash = ?', [Math.floor(Date.now() / 1000), hash], (err, result) => { if(!result) { console.error(err); return @@ -320,7 +223,7 @@ app.get('*', function(req, res) maxCompleted = completed; let checkTime = new Date(); - mysqlSingle.query('UPDATE torrents SET seeders = ?, completed = ?, leechers = ?, trackersChecked = ? WHERE hash = ?', [seeders, completed, leechers, Math.floor(checkTime.getTime() / 1000), hash], function(err, result) { + sphinxSingle.query('UPDATE torrents SET seeders = ?, completed = ?, leechers = ?, trackersChecked = ? 
WHERE hash = ?', [seeders, completed, leechers, Math.floor(checkTime.getTime() / 1000), hash], function(err, result) { if(!result) { console.error(err); return @@ -352,7 +255,7 @@ app.get('*', function(req, res) if(free < config.cleanupDiscLimit) { - mysqlSingle.query(`SELECT * FROM torrents WHERE added < DATE_SUB(NOW(), INTERVAL 6 hour) ORDER BY seeders ASC, files DESC, leechers ASC, completed ASC LIMIT ${cleanTorrents}`, function(err, torrents) { + sphinxSingle.query(`SELECT * FROM torrents WHERE added < DATE_SUB(NOW(), INTERVAL 6 hour) ORDER BY seeders ASC, files DESC, leechers ASC, completed ASC LIMIT ${cleanTorrents}`, function(err, torrents) { if(!torrents) return; @@ -364,8 +267,8 @@ app.get('*', function(req, res) cleanupDebug('cleanup torrent', torrent.name, '[seeders', torrent.seeders, ', files', torrent.files, ']', 'free', (free / (1024 * 1024)) + "mb"); - mysqlSingle.query('DELETE FROM files WHERE hash = ?', torrent.hash); - mysqlSingle.query('DELETE FROM torrents WHERE hash = ?', torrent.hash); + sphinxSingle.query('DELETE FROM files WHERE hash = ?', torrent.hash); + sphinxSingle.query('DELETE FROM torrents WHERE hash = ?', torrent.hash); }) }); } @@ -481,7 +384,7 @@ app.get('*', function(req, res) torrent.id = torrentsId++; - mysqlSingle.query("SELECT id FROM torrents WHERE hash = ?", torrent.hash, (err, single) => { + sphinxSingle.query("SELECT id FROM torrents WHERE hash = ?", torrent.hash, (err, single) => { if(!single) { console.log(err) @@ -497,7 +400,7 @@ app.get('*', function(req, res) torrent.nameIndex = torrent.name - mysqlSingle.insertValues('torrents', torrent, function(err, result) { + sphinxSingle.insertValues('torrents', torrent, function(err, result) { if(result) { if(!silent) send('newTorrent', { @@ -521,14 +424,14 @@ app.get('*', function(req, res) }); }) - mysqlSingle.query('SELECT count(*) as files_count FROM files WHERE hash = ?', [torrent.hash], function(err, rows) { + sphinxSingle.query('SELECT count(*) as files_count FROM files WHERE hash = ?', [torrent.hash], function(err, rows) { if(!rows) return const db_files = rows[0]['files_count']; if(db_files !== torrent.files) { - mysqlSingle.query('DELETE FROM files WHERE hash = ?', torrent.hash, function (err, result) { + sphinxSingle.query('DELETE FROM files WHERE hash = ?', torrent.hash, function (err, result) { if(err) { return; @@ -539,7 +442,7 @@ app.get('*', function(req, res) file.pathIndex = file.path; }); - mysqlSingle.insertValues('files', filesList, function(err, result) { + sphinxSingle.insertValues('files', filesList, function(err, result) { if(!result) { console.error(err); return @@ -554,8 +457,8 @@ app.get('*', function(req, res) const removeTorrentFromDB = async (torrent) => { const {hash} = torrent - await mysqlSingle.query('DELETE FROM torrents WHERE hash = ?', hash) - await mysqlSingle.query('DELETE FROM files WHERE hash = ?', hash) + await sphinxSingle.query('DELETE FROM torrents WHERE hash = ?', hash) + await sphinxSingle.query('DELETE FROM files WHERE hash = ?', hash) } const updateTorrentToDB = async (torrent) => { @@ -571,7 +474,7 @@ app.get('*', function(req, res) delete torrent.id delete torrent.filesList - await mysqlSingle.updateValues('torrents', torrent, {hash: torrent.hash}) + await sphinxSingle.updateValues('torrents', torrent, {hash: torrent.hash}) } const insertMetadata = (metadata, infohash, rinfo) => { @@ -774,6 +677,7 @@ app.get('*', function(req, res) // setup api await API({ sphinx, + sphinxSingle: sphinxSingleAlternative, recive, send, p2p, @@ -821,6 +725,9 @@ 
app.get('*', function(req, res) if(upnp) upnp.ratsUnmap() + console.log('closing alternative db interface') + await new Promise(resolve => sphinxSingleAlternative.end(resolve)) + // save torrents sessions console.log('save torrents downloads sessions') torrentClient.saveSession(dataDirectory + '/downloads.json') @@ -905,9 +812,10 @@ app.get('*', function(req, res) // don't listen complete torrent responses client.removeAllListeners('complete') + console.log('closing torrent client') torrentClient.destroy(() => { sphinx.end(() => spider.close(() => { - mysqlSingle.destroy() + sphinxSingle.destroy() console.log('spider closed') callback() })) From 25a3d8b6dd86ec2b4d141b2567b557825d62cc9d Mon Sep 17 00:00:00 2001 From: Alexey Kasyanchuk Date: Mon, 6 Aug 2018 04:49:07 +0300 Subject: [PATCH 10/25] fix(macos): stabilization with connection pool --- src/background/mysql.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/background/mysql.js b/src/background/mysql.js index 0cd9b64..3add985 100644 --- a/src/background/mysql.js +++ b/src/background/mysql.js @@ -115,7 +115,8 @@ const expand = (sphinx) => { const pool = () => { let sphinx = mysql.createPool({ - connectionLimit: config.sphinx.connectionLimit, + // bug under mac with some problems on big connection size, limit this to very low value on mac os x + connectionLimit: process.platform === 'darwin' ? 3 : config.sphinx.connectionLimit, host : config.sphinx.host, port : config.sphinx.port }); From 701a3008cab47bd0faf64bcfa58a6307a181d983 Mon Sep 17 00:00:00 2001 From: Alexey Kasyanchuk Date: Mon, 6 Aug 2018 05:10:40 +0300 Subject: [PATCH 11/25] release --- .gitignore | 1 - 1 file changed, 1 deletion(-) diff --git a/.gitignore b/.gitignore index 86bcd3c..f8b180d 100644 --- a/.gitignore +++ b/.gitignore @@ -17,4 +17,3 @@ sphinx.conf /app/app.js /app/background.js /app/**/*.map - From c7a69742cf93992b3884cdf7560b790a9afdb2f0 Mon Sep 17 00:00:00 2001 From: semantic-release-bot Date: Mon, 6 Aug 2018 02:15:25 +0000 Subject: [PATCH 12/25] chore(release): 0.27.0 [skip ci] # [0.27.0](https://github.com/DEgITx/rats-search/compare/v0.26.2...v0.27.0) (2018-08-06) ### Bug Fixes * **background:** one closing pattern ([63158dc](https://github.com/DEgITx/rats-search/commit/63158dc)) * **closing:** window can be closing on event ([84e9573](https://github.com/DEgITx/rats-search/commit/84e9573)) * **gui:** top tabs text overlap ([45168a2](https://github.com/DEgITx/rats-search/commit/45168a2)) * **linux:** fix closing on linux ([75ad00a](https://github.com/DEgITx/rats-search/commit/75ad00a)) * **linux:** fix console control after exit ([29cd05a](https://github.com/DEgITx/rats-search/commit/29cd05a)) * **macos:** fix crashes under Mac OS X ([015447c](https://github.com/DEgITx/rats-search/commit/015447c)) * **macos:** stabilization with connection pool ([769521f](https://github.com/DEgITx/rats-search/commit/769521f)) * **scanner:** fix enconding names in some cases [#55](https://github.com/DEgITx/rats-search/issues/55) ([f1043eb](https://github.com/DEgITx/rats-search/commit/f1043eb)) * **server:** fix exit on server version [#54](https://github.com/DEgITx/rats-search/issues/54) [#52](https://github.com/DEgITx/rats-search/issues/52) ([4109ef9](https://github.com/DEgITx/rats-search/commit/4109ef9)) * **translations:** hash translation ([f5a6f17](https://github.com/DEgITx/rats-search/commit/f5a6f17)) ### Features * **cleaning:** fix cleaning checking and removing torrents (also display cleaning status in more details) 
[#52](https://github.com/DEgITx/rats-search/issues/52) ([7e0c565](https://github.com/DEgITx/rats-search/commit/7e0c565)) * **closing:** fast window closing/hiding ([019700e](https://github.com/DEgITx/rats-search/commit/019700e)) * **search:** add remote torrents in db via dht and search requests ([1e44164](https://github.com/DEgITx/rats-search/commit/1e44164)) * **search:** hash/magnet search support in db ([1e57789](https://github.com/DEgITx/rats-search/commit/1e57789)) * **torrents:** add support for dropping torrent to base just with window ([6d82291](https://github.com/DEgITx/rats-search/commit/6d82291)) ### Performance Improvements * **replication:** replication thread optimization ([c5427a6](https://github.com/DEgITx/rats-search/commit/c5427a6)) --- CHANGELOG.md | 30 ++++++++++++++++++++++++++++++ package.json | 2 +- 2 files changed, 31 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 77a4c8c..e1ec538 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,33 @@ +# [0.27.0](https://github.com/DEgITx/rats-search/compare/v0.26.2...v0.27.0) (2018-08-06) + + +### Bug Fixes + +* **background:** one closing pattern ([63158dc](https://github.com/DEgITx/rats-search/commit/63158dc)) +* **closing:** window can be closing on event ([84e9573](https://github.com/DEgITx/rats-search/commit/84e9573)) +* **gui:** top tabs text overlap ([45168a2](https://github.com/DEgITx/rats-search/commit/45168a2)) +* **linux:** fix closing on linux ([75ad00a](https://github.com/DEgITx/rats-search/commit/75ad00a)) +* **linux:** fix console control after exit ([29cd05a](https://github.com/DEgITx/rats-search/commit/29cd05a)) +* **macos:** fix crashes under Mac OS X ([015447c](https://github.com/DEgITx/rats-search/commit/015447c)) +* **macos:** stabilization with connection pool ([769521f](https://github.com/DEgITx/rats-search/commit/769521f)) +* **scanner:** fix enconding names in some cases [#55](https://github.com/DEgITx/rats-search/issues/55) ([f1043eb](https://github.com/DEgITx/rats-search/commit/f1043eb)) +* **server:** fix exit on server version [#54](https://github.com/DEgITx/rats-search/issues/54) [#52](https://github.com/DEgITx/rats-search/issues/52) ([4109ef9](https://github.com/DEgITx/rats-search/commit/4109ef9)) +* **translations:** hash translation ([f5a6f17](https://github.com/DEgITx/rats-search/commit/f5a6f17)) + + +### Features + +* **cleaning:** fix cleaning checking and removing torrents (also display cleaning status in more details) [#52](https://github.com/DEgITx/rats-search/issues/52) ([7e0c565](https://github.com/DEgITx/rats-search/commit/7e0c565)) +* **closing:** fast window closing/hiding ([019700e](https://github.com/DEgITx/rats-search/commit/019700e)) +* **search:** add remote torrents in db via dht and search requests ([1e44164](https://github.com/DEgITx/rats-search/commit/1e44164)) +* **search:** hash/magnet search support in db ([1e57789](https://github.com/DEgITx/rats-search/commit/1e57789)) +* **torrents:** add support for dropping torrent to base just with window ([6d82291](https://github.com/DEgITx/rats-search/commit/6d82291)) + + +### Performance Improvements + +* **replication:** replication thread optimization ([c5427a6](https://github.com/DEgITx/rats-search/commit/c5427a6)) + ## [0.26.2](https://github.com/DEgITx/rats-search/compare/v0.26.1...v0.26.2) (2018-07-22) diff --git a/package.json b/package.json index 21e4fe0..135715a 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "name": "rats-search", "productName": "Rats on The Boat", 
"description": "P2P BitTorrent search engine", - "version": "0.26.2", + "version": "0.27.0", "private": true, "author": "Alexey Kasyanchuk ", "copyright": "Copyright © 2018 Alexey Kasyanchuk", From 35f7d33e8f2e028c74dcd91646eb6cfa5589dfc2 Mon Sep 17 00:00:00 2001 From: Alexey Kasyanchuk Date: Mon, 6 Aug 2018 17:18:52 +0300 Subject: [PATCH 13/25] perf(replication): replicate number accordion to cpu usage --- src/background/api.js | 6 +++- src/background/bt/cpu-usage-global.js | 51 +++++++++++++++++++++++++++ 2 files changed, 56 insertions(+), 1 deletion(-) create mode 100644 src/background/bt/cpu-usage-global.js diff --git a/src/background/api.js b/src/background/api.js index f7b0ce3..81d68aa 100644 --- a/src/background/api.js +++ b/src/background/api.js @@ -4,6 +4,7 @@ const compareVersions = require('compare-versions'); const getTorrent = require('./gettorrent') const _ = require('lodash') const asyncForEach = require('./asyncForEach') +const cpuUsage = require('./bt/cpu-usage-global') module.exports = async ({ sphinx, @@ -213,7 +214,10 @@ module.exports = async ({ if(sphinxSingle.state === 'disconnected') return - sphinxSingle.query('SELECT * FROM `torrents` ORDER BY rand() limit 5', (error, torrents) => { + const cpu = cpuUsage() + const limit = Math.max(1, 5 - (cpu / 20) | 0) + + sphinxSingle.query(`SELECT * FROM torrents ORDER BY rand() limit ${limit}`, (error, torrents) => { if(!torrents || torrents.length == 0) { callback(undefined) return; diff --git a/src/background/bt/cpu-usage-global.js b/src/background/bt/cpu-usage-global.js new file mode 100644 index 0000000..3d1a5b2 --- /dev/null +++ b/src/background/bt/cpu-usage-global.js @@ -0,0 +1,51 @@ +var os = require("os"); + +//Create function to get CPU information +function cpuAverage() { + + //Initialise sum of idle and time of cores and fetch CPU info + let totalIdle = 0, totalTick = 0; + const cpus = os.cpus(); + + //Loop through CPU cores + for(let i = 0, len = cpus.length; i < len; i++) { + + //Select CPU core + const cpu = cpus[i]; + + //Total up the time in the cores tick + for(const type in cpu.times) { + totalTick += cpu.times[type]; + } + + //Total up the idle time of the core + totalIdle += cpu.times.idle; + } + + //Return the average Idle and Tick times + return {idle: totalIdle / cpus.length, total: totalTick / cpus.length}; +} + +//Grab first CPU Measure +let startMeasure = cpuAverage(); +let percentageCPU = 0 + +//Set delay for second Measure +const cpuTimer = setInterval(function() { + + //Grab second Measure + const endMeasure = cpuAverage(); + + //Calculate the difference in idle and total time between the measures + const idleDifference = endMeasure.idle - startMeasure.idle; + const totalDifference = endMeasure.total - startMeasure.total; + + //Calculate the average percentage CPU usage + percentageCPU = 100 - ~~(100 * idleDifference / totalDifference); + startMeasure = endMeasure + +}, 300); + +cpuTimer.unref() + +module.exports = () => percentageCPU \ No newline at end of file From e7b035a1a8a630db8ff06db40a8062db43f0023e Mon Sep 17 00:00:00 2001 From: Alexey Kasyanchuk Date: Mon, 6 Aug 2018 17:56:21 +0300 Subject: [PATCH 14/25] perf(torrents): ability to disable integrity check on torrents adding torrents #47 This option will optimize db usage performance on big databases --- src/app/config-page.js | 12 +++++++ src/background/config.js | 1 + src/background/spider.js | 76 ++++++++++++++++++++++++---------------- translations/en.json | 4 ++- translations/ru.json | 4 ++- translations/ua.json | 4 ++- 6 files 
changed, 68 insertions(+), 33 deletions(-) diff --git a/src/app/config-page.js b/src/app/config-page.js index 1c47074..bddc750 100644 --- a/src/app/config-page.js +++ b/src/app/config-page.js @@ -247,6 +247,18 @@ export default class ConfigPage extends Page { />
* {__('Enable torrents replication from another rats clients. Dont recomended if torrent scanner works correct')}.
+
+ {
+ this.options.recheckFilesOnAdding = checked
+ this.forceUpdate()
+ }}
+ />
+
* {__('Enable database torrents files intergrity check on adding each torrent. Disable this will free some cpu usage on adding operation.')}
+
{__('Torrent network scanner settings')}:
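For readers skimming the patch, the spider.js hunk further down wires this option up: a torrent that is already present in the database only gets its files table re-counted when recheckFilesOnAdding is enabled. A condensed sketch of that control flow follows; it is not the literal patch code, and config, sphinxSingle, addFilesToDatabase and torrent are assumed from the surrounding diff:

```js
// Condensed sketch (assumed names from the patch: config, sphinxSingle, addFilesToDatabase).
// For a torrent that is already in the database, the per-torrent integrity check
// only runs when config.recheckFilesOnAdding is enabled; otherwise the COUNT(*)
// round trip against the files table is skipped entirely.
const maybeRecheckFiles = (torrent) => new Promise((resolve) => {
	if (!config.recheckFilesOnAdding) {
		resolve() // integrity check disabled: no extra SQL work for an already-known torrent
		return
	}
	sphinxSingle.query('SELECT count(*) as files_count FROM files WHERE hash = ?', [torrent.hash], (err, rows) => {
		if (rows && rows[0].files_count !== torrent.files)
			addFilesToDatabase() // stored file count diverged: rewrite the file list
		resolve()
	})
})
```

Disabling the flag trades the integrity recheck for one less SQL query per duplicate torrent, which is the CPU saving the commit message refers to.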
diff --git a/src/background/config.js b/src/background/config.js index 038898c..f0094fc 100644 --- a/src/background/config.js +++ b/src/background/config.js @@ -59,6 +59,7 @@ let config = { cleanupDiscLimit: 7 * 1024 * 1024 * 1024, spaceQuota: false, spaceDiskLimit: 7 * 1024 * 1024 * 1024, + recheckFilesOnAdding: true, dbPath: '', diff --git a/src/background/spider.js b/src/background/spider.js index 997e5fc..6b1cee9 100644 --- a/src/background/spider.js +++ b/src/background/spider.js @@ -384,6 +384,42 @@ module.exports = function (send, recive, dataDirectory, version, env) torrent.id = torrentsId++; + const recheckFiles = (callback) => { + sphinxSingle.query('SELECT count(*) as files_count FROM files WHERE hash = ?', [torrent.hash], function(err, rows) { + if(!rows) + return + + const db_files = rows[0]['files_count']; + if(db_files !== torrent.files) + { + callback() + } + }) + } + + const addFilesToDatabase = () => { + sphinxSingle.query('DELETE FROM files WHERE hash = ?', torrent.hash, function (err, result) { + if(err) + { + return; + } + + filesList.forEach((file) => { + file.id = filesId++; + file.pathIndex = file.path; + }); + + sphinxSingle.insertValues('files', filesList, function(err, result) { + if(!result) { + console.error(err); + return + } + if(!silent) + send('filesReady', torrent.hash); + }); + }) + } + sphinxSingle.query("SELECT id FROM torrents WHERE hash = ?", torrent.hash, (err, single) => { if(!single) { @@ -392,11 +428,21 @@ module.exports = function (send, recive, dataDirectory, version, env) return } + // torrent already probably in db if(single.length > 0) { + if(config.recheckFilesOnAdding) + { + // recheck files and if they not ok add their to database + recheckFiles(addFilesToDatabase) + } resolve() return } + else + { + addFilesToDatabase() + } torrent.nameIndex = torrent.name @@ -423,36 +469,6 @@ module.exports = function (send, recive, dataDirectory, version, env) events.emit('insert', torrent) }); }) - - sphinxSingle.query('SELECT count(*) as files_count FROM files WHERE hash = ?', [torrent.hash], function(err, rows) { - if(!rows) - return - - const db_files = rows[0]['files_count']; - if(db_files !== torrent.files) - { - sphinxSingle.query('DELETE FROM files WHERE hash = ?', torrent.hash, function (err, result) { - if(err) - { - return; - } - - filesList.forEach((file) => { - file.id = filesId++; - file.pathIndex = file.path; - }); - - sphinxSingle.insertValues('files', filesList, function(err, result) { - if(!result) { - console.error(err); - return - } - if(!silent) - send('filesReady', torrent.hash); - }); - }) - } - }) }) const removeTorrentFromDB = async (torrent) => { diff --git a/translations/en.json b/translations/en.json index cbed4e0..ec5cc21 100644 --- a/translations/en.json +++ b/translations/en.json @@ -185,6 +185,8 @@ "calculation": "calculation", "removing": "removing", "Torrents cleaned": "Torrents cleaned", - "or with hash": "or with hash" + "or with hash": "or with hash", + "Check torrent files intergrity": "Check torrent files intergrity", + "Enable database torrents files intergrity check on adding each torrent. Disable this will free some cpu usage on adding operation.": "Enable database torrents files intergrity check on adding each torrent. Disable this will free some cpu usage on adding operation." 
} } \ No newline at end of file diff --git a/translations/ru.json b/translations/ru.json index 691d83f..8859197 100644 --- a/translations/ru.json +++ b/translations/ru.json @@ -185,6 +185,8 @@ "calculation": "подсчитывается", "removing": "удаляется", "Torrents cleaned": "Торренты очещены", - "or with hash": "или по хэшу" + "or with hash": "или по хэшу", + "Check torrent files intergrity": "Проверка целостности файлов", + "Enable database torrents files intergrity check on adding each torrent. Disable this will free some cpu usage on adding operation.": "Включить проверку целостности файлов в базе при добавлении каждого торрента. Отключение этой опции освободит некоторорое количество ресурсов процессора при добавлении." } } \ No newline at end of file diff --git a/translations/ua.json b/translations/ua.json index c26b4af..795ca4d 100644 --- a/translations/ua.json +++ b/translations/ua.json @@ -185,6 +185,8 @@ "calculation": "calculation", "removing": "removing", "Torrents cleaned": "Torrents cleaned", - "or with hash": "or with hash" + "or with hash": "or with hash", + "Check torrent files intergrity": "Check torrent files intergrity", + "Enable database torrents files intergrity check on adding each torrent. Disable this will free some cpu usage on adding operation.": "Enable database torrents files intergrity check on adding each torrent. Disable this will free some cpu usage on adding operation." } } \ No newline at end of file From aa68200bc585378c9c6736b0d0dd1de8b3472d17 Mon Sep 17 00:00:00 2001 From: Alexey Kasyanchuk Date: Tue, 7 Aug 2018 01:21:00 +0300 Subject: [PATCH 15/25] feat(log): color log --- package-lock.json | 7 ++++++- package.json | 1 + src/background/background.js | 39 +++++++++++++++++++++++++++--------- 3 files changed, 36 insertions(+), 11 deletions(-) diff --git a/package-lock.json b/package-lock.json index 0fc30b3..fe4425b 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "rats-search", - "version": "0.26.2", + "version": "0.27.0", "lockfileVersion": 1, "requires": true, "dependencies": { @@ -1964,6 +1964,11 @@ "integrity": "sha1-SlKCrBZHKek2Gbz9OtFR+BfOkfU=", "dev": true }, + "ansi-256-colors": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/ansi-256-colors/-/ansi-256-colors-1.1.0.tgz", + "integrity": "sha1-kQ3lDvzHwJ49gvL4er1rcAwYgYo=" + }, "ansi-align": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ansi-align/-/ansi-align-2.0.0.tgz", diff --git a/package.json b/package.json index 135715a..44434e2 100644 --- a/package.json +++ b/package.json @@ -111,6 +111,7 @@ "buildweb": "node src/background/webpack.js" }, "dependencies": { + "ansi-256-colors": "^1.1.0", "bencode": "^2.0.0", "bitfield": "^2.0.0", "compare-versions": "^3.1.0", diff --git a/src/background/background.js b/src/background/background.js index 347d797..f4ed799 100644 --- a/src/background/background.js +++ b/src/background/background.js @@ -78,25 +78,44 @@ if (!fs.existsSync(app.getPath("userData"))){ const logFile = fs.createWriteStream(app.getPath("userData") + '/rats.log', {flags : 'w'}); const logStdout = process.stdout; +const colors = require('ansi-256-colors'); +const stringHashCode = (str) => { + let hash = 0, i, chr; + if (str.length === 0) + return hash; + for (i = 0; i < str.length; i++) { + chr = str.charCodeAt(i); + hash = ((hash << 5) - hash) + chr; + hash |= 0; // Convert to 32bit integer + } + return hash; +}; + console.log = (...d) => { const date = (new Date).toLocaleTimeString() logFile.write(`[${date}] ` + 
util.format(...d) + '\n'); logStdout.write(util.format(...d) + '\n'); }; +global.logT = (type, ...d) => { + const date = (new Date).toLocaleTimeString() + logFile.write(`[${date}] [${type}] ` + util.format(...d) + '\n'); + logStdout.write(colors.fg.codes[Math.abs(stringHashCode(type)) % 256] + `[${type}]` + colors.reset + ' ' + util.format(...d) + '\n'); +} + // print os info -console.log('Rats', app.getVersion()) -console.log('Platform:', os.platform()) -console.log('Arch:', os.arch()) -console.log('OS Release:', os.release()) -console.log('CPU:', os.cpus()[0].model) -console.log('CPU Logic cores:', os.cpus().length) -console.log('Total memory:', (os.totalmem() / (1024 * 1024)).toFixed(2), 'MB') -console.log('Free memory:', (os.freemem() / (1024 * 1024)).toFixed(2), 'MB') -console.log('NodeJS:', process.version) +logT('system', 'Rats', app.getVersion()) +logT('system', 'Platform:', os.platform()) +logT('system', 'Arch:', os.arch()) +logT('system', 'OS Release:', os.release()) +logT('system', 'CPU:', os.cpus()[0].model) +logT('system', 'CPU Logic cores:', os.cpus().length) +logT('system', 'Total memory:', (os.totalmem() / (1024 * 1024)).toFixed(2), 'MB') +logT('system', 'Free memory:', (os.freemem() / (1024 * 1024)).toFixed(2), 'MB') +logT('system', 'NodeJS:', process.version) if(portative) - console.log('portative compability') + logT('system', 'portative compability') // handle promise rejections process.on('unhandledRejection', r => console.log('Rejection:', r)); From 5bdab1b516aa32264e1e1da9434f0a3d41788efd Mon Sep 17 00:00:00 2001 From: Alexey Kasyanchuk Date: Tue, 7 Aug 2018 01:43:37 +0300 Subject: [PATCH 16/25] fix(log): color messages (part 1) --- src/background/feed.js | 6 ++-- src/background/forBigTable.js | 2 +- src/background/mysql.js | 4 +-- src/background/p2p.js | 24 ++++++------- src/background/sphinx.js | 20 +++++------ src/background/spider.js | 66 +++++++++++++++++------------------ src/background/ssh.js | 6 ++-- src/background/store.js | 24 ++++++------- 8 files changed, 76 insertions(+), 76 deletions(-) diff --git a/src/background/feed.js b/src/background/feed.js index 4ed6e5e..39114d1 100644 --- a/src/background/feed.js +++ b/src/background/feed.js @@ -19,7 +19,7 @@ module.exports = class Feed { if(!this.loaded) return // feed not loaded on begining, ignore saving - console.log('saving feed') + logT('feed', 'saving feed') config.feedDate = this.feedDate await this.sphinx.query('delete from feed where id > 0') let id = 0 @@ -40,12 +40,12 @@ module.exports = class Feed { this._order() this.feedDate = config.feedDate this.loaded = true - console.log('lodead feed') + logT('feed', 'lodead feed') } clear() { - console.log('clearing feed') + logT('feed', 'clearing feed') this.feed = [] } diff --git a/src/background/forBigTable.js b/src/background/forBigTable.js index 24451c6..8da4114 100644 --- a/src/background/forBigTable.js +++ b/src/background/forBigTable.js @@ -3,7 +3,7 @@ module.exports = (sphinx, table, callback, doneCallback, max = 1000, where = '') sphinx.query(`SELECT * FROM ${table} WHERE id > ${index} ${where} LIMIT ${max}`, (err, torrents) => { const finish = () => { if(err) - console.log('big table parse error', err) + logT('sql', 'big table parse error', err) if(doneCallback) doneCallback(true) done(true) diff --git a/src/background/mysql.js b/src/background/mysql.js index 3add985..f5b88ff 100644 --- a/src/background/mysql.js +++ b/src/background/mysql.js @@ -167,9 +167,9 @@ const single = (callback) => { }); mysqlSingle._mysql.on('error', (err) => { - 
console.log('db error', err); + logT('sql', 'db error', err); if(err.code === 'PROTOCOL_CONNECTION_LOST') { // Connection to the MySQL server is usually - console.log('restart single sql connection') + logT('sql', 'restart single sql connection') mysqlSingle._mysql = undefined start(); // lost due to either server restart, or a } else { // connnection idle timeout (the wait_timeout diff --git a/src/background/p2p.js b/src/background/p2p.js index 7685bd0..7a4c1fc 100644 --- a/src/background/p2p.js +++ b/src/background/p2p.js @@ -23,7 +23,7 @@ class p2p { this.info = {} if(!config.peerId) { - console.log('generate peerId') + logT('p2p', 'generate peerId') config.peerId = Math.random().toString(36).substring(2, 15) + Math.random().toString(36).substring(2, 15) } this.peerId = config.peerId; @@ -44,7 +44,7 @@ class p2p { this.tcpServer.on('connection', (socket) => { this.tcpServer.getConnections((err,con) => { - console.log('server connected', con, 'max', this.tcpServer.maxConnections) + logT('p2p', 'server connected', con, 'max', this.tcpServer.maxConnections) }) socket = new JsonSocket(socket); this.clients.push(socket) @@ -108,7 +108,7 @@ class p2p { // new peer with peer exchange this.on('peer', (peer) => { - console.log('got peer exchange', peer) + logT('p2p', 'got peer exchange', peer) this.add(peer) }) @@ -124,7 +124,7 @@ class p2p { if (alias >= 1) { // nothing } else { - console.log('ignore local address', iface.address); + logT('p2p', 'ignore local address', iface.address); this.ignore(iface.address) } ++alias; @@ -133,7 +133,7 @@ class p2p { } listen() { - console.log('listen p2p on', config.spiderPort, 'port') + logT('p2p', 'listen p2p on', config.spiderPort, 'port') this.tcpServer.listen(config.spiderPort, '0.0.0.0'); } @@ -145,17 +145,17 @@ class p2p { // all ok don't need to start any ssh tunnels if(isAvailable) { - console.log('tcp p2p port is reachable - all ok') + logT('ssh', 'tcp p2p port is reachable - all ok') return; } else { - console.log('tcp p2p port is unreachable - try ssh tunnel') + logT('ssh', 'tcp p2p port is unreachable - try ssh tunnel') } if(!this.encryptor) { - console.error('something wrong with encryptor') + logT('ssh', 'something wrong with encryptor') return } @@ -170,7 +170,7 @@ class p2p { return } - console.log('ssh tunnel success, redirect peers to ssh') + logT('ssh', 'ssh tunnel success, redirect peers to ssh') this.p2pStatus = 1 this.send('p2pStatus', this.p2pStatus) @@ -185,7 +185,7 @@ class p2p { { if(this.ssh) { - console.log('closing ssh...') + logT('ssh', 'closing ssh...') this.ssh.kill() } // close server @@ -312,7 +312,7 @@ class p2p { torrents: data.info ? 
data.info.torrents || 0 : 0 }) this.events.emit('peer', address) - console.log('new peer', address) + logT('p2p', 'new peer', address) // add some other peers if(data.peers && data.peers.length > 0) @@ -338,7 +338,7 @@ class p2p { } this.peers.splice(index, 1); - console.log('close peer connection', address) + logT('p2p', 'close peer connection', address) } }) diff --git a/src/background/sphinx.js b/src/background/sphinx.js index 485318e..18182e2 100644 --- a/src/background/sphinx.js +++ b/src/background/sphinx.js @@ -136,8 +136,8 @@ const writeSphinxConfig = (path, dbPath) => { config = iconv.encode(config, 'win1251') fs.writeFileSync(`${path}/sphinx.conf`, config) - console.log(`writed sphinx config to ${path}`) - console.log('db path:', dbPath) + logT('sphinx', `writed sphinx config to ${path}`) + logT('sphinx', 'db path:', dbPath) return {isInitDb} } @@ -146,7 +146,7 @@ module.exports = (callback, dataDirectory, onClose) => { const start = (callback) => { const sphinxPath = path.resolve(appPath('searchd')) - console.log('Sphinx Path:', sphinxPath) + logT('sphinx', 'Sphinx Path:', sphinxPath) const sphinxConfigDirectory = dataDirectory appConfig['dbPath'] = appConfig.dbPath && appConfig.dbPath.length > 0 ? appConfig.dbPath : sphinxConfigDirectory; @@ -174,14 +174,14 @@ module.exports = (callback, dataDirectory, onClose) => { const optimizeResolvers = {} sphinx.stdout.on('data', (data) => { - console.log(`sphinx: ${data}`) + logT('sphinx', `sphinx: ${data}`) // don't listen if we are in fixing mode if(sphinx.fixing) return if (data.includes('accepting connections')) { - console.log('catched sphinx start') + logT('sphinx', 'catched sphinx start') if(callback) callback() } @@ -196,14 +196,14 @@ module.exports = (callback, dataDirectory, onClose) => { { if(optimizeResolvers[checkOptimized[1]]) { - console.log('resolve optimizer', checkOptimized[1]) + logT('sphinx', 'resolve optimizer', checkOptimized[1]) optimizeResolvers[checkOptimized[1]]() } } }) sphinx.on('close', (code, signal) => { - console.log(`sphinx closed with code ${code} and signal ${signal}`) + logT('sphinx', `sphinx closed with code ${code} and signal ${signal}`) if(onClose && !sphinx.replaceOnClose) // sometime we don't want to call default callback onClose() if(sphinx.onClose) @@ -211,7 +211,7 @@ module.exports = (callback, dataDirectory, onClose) => { }) sphinx.stop = (onFinish, replaceFinish) => { - console.log('sphinx closing...') + logT('sphinx', 'sphinx closing...') if(onFinish) sphinx.onClose = onFinish if(replaceFinish) @@ -234,7 +234,7 @@ module.exports = (callback, dataDirectory, onClose) => { // close db await new Promise((resolve) => { sphinx.stop(resolve, true) - console.log('revent start') + logT('sphinx', 'revent start') }) const checkNullFile = (file) => new Promise((resolve) => { @@ -258,7 +258,7 @@ module.exports = (callback, dataDirectory, onClose) => { brokenFiles = probablyCoruptedFiles.filter((file, index) => !brokenFiles[index]) brokenFiles.forEach(file => { - console.log('FIXDB: clean file because of broken', file) + logT('sphinx', 'FIXDB: clean file because of broken', file) fs.unlinkSync(file) }) diff --git a/src/background/spider.js b/src/background/spider.js index 6b1cee9..314119f 100644 --- a/src/background/spider.js +++ b/src/background/spider.js @@ -113,7 +113,7 @@ module.exports = function (send, recive, dataDirectory, version, env) if(peers && peers.length > 0) { peers.forEach(peer => p2p.add(peer)) - console.log('loaded', peers.length, 'peers') + logT('p2p', 'loaded', peers.length, 'peers') 
} } @@ -129,7 +129,7 @@ module.exports = function (send, recive, dataDirectory, version, env) resolve(data.length > 0 && JSON.parse(data)) }); }).on("error", (err) => { - console.log(`${url} error: ` + err.message) + logT('http', `${url} error: ` + err.message) resolve(false) }); }) @@ -145,7 +145,7 @@ module.exports = function (send, recive, dataDirectory, version, env) if(peers && peers.length > 0) { peers.forEach(peer => p2p.add(peer)) - console.log('loaded', peers.length, 'peers from bootstrap') + logT('p2p', 'loaded', peers.length, 'peers from bootstrap') } } if(json.bootstrapMap) @@ -165,7 +165,7 @@ module.exports = function (send, recive, dataDirectory, version, env) } } } - console.log('loaded peers map from bootstrap') + logT('p2p', 'loaded peers map from bootstrap') } } @@ -184,7 +184,7 @@ module.exports = function (send, recive, dataDirectory, version, env) p2pBootstrapLoop = setInterval(() => { if(p2p.size === 0) { - console.log('load peers from bootstap again because no peers at this moment') + logT('p2p', 'load peers from bootstap again because no peers at this moment') loadBootstrap() } }, 90000) // try to load new peers if there is no one found @@ -282,7 +282,7 @@ module.exports = function (send, recive, dataDirectory, version, env) const checkTorrent = (torrent) => { if(config.filters.maxFiles > 0 && torrent.files > config.filters.maxFiles) { - console.log('ignore', torrent.name, 'because files', torrent.files, '>', config.filters.maxFiles) + logT('check', 'ignore', torrent.name, 'because files', torrent.files, '>', config.filters.maxFiles) return false } @@ -292,37 +292,37 @@ module.exports = function (send, recive, dataDirectory, version, env) const rx = new RegExp(nameRX) if(!config.filters.namingRegExpNegative && !rx.test(torrent.name)) { - console.log('ignore', torrent.name, 'by naming rx') + logT('check', 'ignore', torrent.name, 'by naming rx') return false } else if(config.filters.namingRegExpNegative && rx.test(torrent.name)) { - console.log('ignore', torrent.name, 'by naming rx negative') + logT('check', 'ignore', torrent.name, 'by naming rx negative') return false } } if(torrent.contentType === 'bad') { - console.log('ignore torrent', torrent.name, 'because this is a bad thing') + logT('check', 'ignore torrent', torrent.name, 'because this is a bad thing') return false } if(config.filters.adultFilter && torrent.contentCategory === 'xxx') { - console.log('ignore torrent', torrent.name, 'because adult filter') + logT('check', 'ignore torrent', torrent.name, 'because adult filter') return false } if(config.filters.sizeEnabled && (torrent.size < config.filters.size.min || torrent.size > config.filters.size.max)) { - console.log('ignore torrent', torrent.name, 'because size bounds of', torrent.size, ':', config.filters.size) + logT('check', 'ignore torrent', torrent.name, 'because size bounds of', torrent.size, ':', config.filters.size) return false } if(config.filters.contentType && Array.isArray(config.filters.contentType) && !config.filters.contentType.includes(torrent.contentType)) { - console.log('ignore torrent', torrent.name, 'because type', torrent.contentType, 'not in:', config.filters.contentType) + logT('check', 'ignore torrent', torrent.name, 'because type', torrent.contentType, 'not in:', config.filters.contentType) return false } @@ -377,7 +377,7 @@ module.exports = function (send, recive, dataDirectory, version, env) if(!filesList || filesList.length == 0) { - console.log('skip torrent', torrent.name, '- no filesList') + logT('add', 'skip torrent', 
torrent.name, '- no filesList') resolve() return } @@ -423,7 +423,7 @@ module.exports = function (send, recive, dataDirectory, version, env) sphinxSingle.query("SELECT id FROM torrents WHERE hash = ?", torrent.hash, (err, single) => { if(!single) { - console.log(err) + logT('add', err) resolve() return } @@ -462,7 +462,7 @@ module.exports = function (send, recive, dataDirectory, version, env) } else { - console.log(torrent); + logT('add', torrent); console.error(err); } resolve() @@ -494,7 +494,7 @@ module.exports = function (send, recive, dataDirectory, version, env) } const insertMetadata = (metadata, infohash, rinfo) => { - console.log('finded torrent', metadata.info.name, ' and add to database'); + logT('spider', 'finded torrent', metadata.info.name, ' and add to database'); const bufferToString = (buffer) => Buffer.isBuffer(buffer) ? buffer.toString() : buffer @@ -551,7 +551,7 @@ module.exports = function (send, recive, dataDirectory, version, env) { disk.check(rootPath, function(err, info) { if (err) { - console.log(err); + logT('quota', err); } else { const {available, free, total} = info; @@ -594,7 +594,7 @@ module.exports = function (send, recive, dataDirectory, version, env) } recive('dropTorrents', (pathTorrents) => { - console.log('drop torrents and replicate from original') + logT('drop', 'drop torrents and replicate from original') const torrents = pathTorrents.map(path => parseTorrent(fs.readFileSync(path))) torrents.forEach(torrent => insertMetadata(torrent, torrent.infoHashBuffer, {address: '127.0.0.1', port: 666})) }) @@ -610,7 +610,7 @@ module.exports = function (send, recive, dataDirectory, version, env) const {address, port} = stunMsg.getAttribute(STUN_ATTR_XOR_MAPPED_ADDRESS).value stunServer.close() - console.log('p2p stun ignore my address', address) + logT('stun', 'p2p stun ignore my address', address) p2p.ignore(address) // check port avalibility @@ -632,7 +632,7 @@ module.exports = function (send, recive, dataDirectory, version, env) ttl: 0 }, function(err) { if(err) - console.log('upnp server dont respond') + logT('upnp', 'upnp server dont respond') }); upnp.portMapping({ public: config.spiderPort, @@ -642,7 +642,7 @@ module.exports = function (send, recive, dataDirectory, version, env) ttl: 0 }, function(err) { if(err) - console.log('upnp server dont respond') + logT('upnp', 'upnp server dont respond') }); upnp.portMapping({ public: config.udpTrackersPort, @@ -652,7 +652,7 @@ module.exports = function (send, recive, dataDirectory, version, env) ttl: 0 }, function(err) { if(err) - console.log('upnp server dont respond') + logT('upnp', 'upnp server dont respond') }); } @@ -677,7 +677,7 @@ module.exports = function (send, recive, dataDirectory, version, env) if(err) return - console.log('p2p upnp ignore my address', ip) + logT('upnp', 'p2p upnp ignore my address', ip) p2p.ignore(ip) }); } @@ -732,20 +732,20 @@ module.exports = function (send, recive, dataDirectory, version, env) } // load torrents sessions - console.log('restore downloading sessions') + logT('downloader', 'restore downloading sessions') torrentClient.loadSession(dataDirectory + '/downloads.json') this.stop = async (callback) => { this.closing = true - console.log('spider closing...') + logT('close', 'spider closing...') if(upnp) upnp.ratsUnmap() - console.log('closing alternative db interface') + logT('close', 'closing alternative db interface') await new Promise(resolve => sphinxSingleAlternative.end(resolve)) // save torrents sessions - console.log('save torrents downloads sessions') + 
logT('close', 'save torrents downloads sessions') torrentClient.saveSession(dataDirectory + '/downloads.json') // save feed @@ -755,7 +755,7 @@ module.exports = function (send, recive, dataDirectory, version, env) if(config.p2pBootstrap && p2pBootstrapLoop) { clearInterval(p2pBootstrapLoop) - console.log('bootstrap loop stoped') + logT('close', 'bootstrap loop stoped') } // safe future peers @@ -766,7 +766,7 @@ module.exports = function (send, recive, dataDirectory, version, env) if(addresses.length > 0) { fs.writeFileSync(dataDirectory + '/peers.p2p', peersEncripted, 'utf8'); - console.log('local peers saved') + logT('close', 'local peers saved') } if(config.p2pBootstrap) @@ -804,7 +804,7 @@ module.exports = function (send, recive, dataDirectory, version, env) 'Content-Type' : "application/json", } }; - console.log('bootstrap peers saved to', host) + logT('close', 'bootstrap peers saved to', host) const req = http.request(options, resolve); req.on('error', resolve) req.end(JSON.stringify({ @@ -820,7 +820,7 @@ module.exports = function (send, recive, dataDirectory, version, env) } } - console.log('closing p2p...') + logT('close', 'closing p2p...') // don't listen spider peer appears spider.removeAllListeners('peer') await p2p.close() @@ -828,11 +828,11 @@ module.exports = function (send, recive, dataDirectory, version, env) // don't listen complete torrent responses client.removeAllListeners('complete') - console.log('closing torrent client') + logT('close', 'closing torrent client') torrentClient.destroy(() => { sphinx.end(() => spider.close(() => { sphinxSingle.destroy() - console.log('spider closed') + logT('close', 'spider closed') callback() })) }) diff --git a/src/background/ssh.js b/src/background/ssh.js index 81f771c..3fe7e37 100644 --- a/src/background/ssh.js +++ b/src/background/ssh.js @@ -46,7 +46,7 @@ const startSSH = (port, host, user, password, callback) => { } ssh.stdout.on('data', (data) => { - console.log(`ssh: ${data}`) + logT('ssh', `ssh: ${data}`) checkMessage(data) if(data.includes('Store key in cache?')) { @@ -56,7 +56,7 @@ const startSSH = (port, host, user, password, callback) => { }) ssh.stderr.on('data', (data) => { - console.log(`ssh error: ${data}`); + logT('ssh', `ssh error: ${data}`); checkMessage(data) if(data.includes('Password authentication failed')) { @@ -70,7 +70,7 @@ const startSSH = (port, host, user, password, callback) => { }); ssh.on('close', (code, signal) => { - console.log(`ssh closed with code ${code} and signal ${signal}`) + logT('ssh', `ssh closed with code ${code} and signal ${signal}`) if(callback) callback(false) }) diff --git a/src/background/store.js b/src/background/store.js index 433e858..8d07fa5 100644 --- a/src/background/store.js +++ b/src/background/store.js @@ -13,7 +13,7 @@ module.exports = class P2PStore extends EventEmitter { }); this.synchronized = false - console.log('connect p2p store...') + logT('store', 'connect p2p store...') this.p2p = p2p this.sphinx = sphinx @@ -24,7 +24,7 @@ module.exports = class P2PStore extends EventEmitter { if(rows[0] && rows[0].mx >= 1) this.id = rows[0].mx; - console.log('store db index', this.id) + logT('store', 'store db index', this.id) this.p2p.events.on('peer', (peer) => { if(peer.info && peer.info.store) @@ -40,7 +40,7 @@ module.exports = class P2PStore extends EventEmitter { this.p2p.on('dbStore', (record) => { if(!record || record.id - 1 !== this.id) { - console.log('out of range peerdb store', record.id) + logT('store', 'out of range peerdb store', record.id) return } @@ -51,7 +51,7 @@ 
module.exports = class P2PStore extends EventEmitter { }) this.p2p.on('dbSync', ({id} = {}, callback) => { - console.log('ask to sync db from', id, 'version') + logT('store', 'ask to sync db from', id, 'version') if(typeof id === 'undefined' || id >= this.id) { callback(false) @@ -62,7 +62,7 @@ module.exports = class P2PStore extends EventEmitter { this.sphinx.query(`select * from store where id > ${id}`, (err, records) => { if(err) { - console.log(err) + logT('store', err) return } @@ -77,7 +77,7 @@ module.exports = class P2PStore extends EventEmitter { sync(peer) { - console.log('sync db on version', this.id, peer ? `from peer ${peer.peerId}` : '') + logT('store', 'sync db on version', this.id, peer ? `from peer ${peer.peerId}` : '') const processSync = (data, nil, peer) => { if(!data || !data.records) return @@ -90,7 +90,7 @@ module.exports = class P2PStore extends EventEmitter { && oldIndex < this.id // last sync update of store must be successful, otherwise no point to try sync db from this peer && this.id < data.index) { - console.log('continue sync store from', this.id, 'index', 'peer', peer.peerId) + logT('store', 'continue sync store from', this.id, 'index', 'peer', peer.peerId) peer.emit('dbSync', {id: this.id}, processSync) } } @@ -119,7 +119,7 @@ module.exports = class P2PStore extends EventEmitter { // check hash if(objectHash(record.data) !== record.hash) { - console.log('wrong hash for sync peerdb') + logT('store', 'wrong hash for sync peerdb') return } @@ -127,7 +127,7 @@ module.exports = class P2PStore extends EventEmitter { record.myself = false // push to db - console.log('sync peerdb record', record.id) + logT('store', 'sync peerdb record', record.id) this._pushToDb(record) this.id = record.id @@ -145,7 +145,7 @@ module.exports = class P2PStore extends EventEmitter { (err) => { if(err) { - console.log(err) + logT('store', err) return } @@ -159,7 +159,7 @@ module.exports = class P2PStore extends EventEmitter { { if(!this.synchronized) { - console.log('cant store item on unsync db') + logT('store', 'cant store item on unsync db') return false } @@ -177,7 +177,7 @@ module.exports = class P2PStore extends EventEmitter { temp } - console.log('store object', value.id) + logT('store', 'store object', value.id) this._pushToDb(value, () => { // store record From bf62a2de16a321d770fba59776d83d9ae6c1aa05 Mon Sep 17 00:00:00 2001 From: Alexey Kasyanchuk Date: Tue, 7 Aug 2018 02:25:10 +0300 Subject: [PATCH 17/25] fix(log): color log (part 2) --- src/background/api.js | 64 ++++++++++++++++++------------------ src/background/background.js | 12 +++---- 2 files changed, 38 insertions(+), 38 deletions(-) diff --git a/src/background/api.js b/src/background/api.js index 81d68aa..2aa6cfb 100644 --- a/src/background/api.js +++ b/src/background/api.js @@ -137,22 +137,22 @@ module.exports = async ({ // remote request if(options.peer) { - console.log('remote torrent request to peer') + logT('search', 'remote torrent request to peer') const peer = p2p.find(options.peer) if(!peer) { - console.log('dont found requested peer in peers') + logT('search', 'dont found requested peer in peers') callback(undefined) return; } delete options.peer; peer.emit('torrent', {hash, options}, (data, nil, address) => { - console.log('remote torrent result', hash) + logT('search', 'remote torrent result', hash) callback(data) if(compareVersions(address.version, '0.19.0') < 0) { - console.log('replication selected torrent now works only with 0.19.0 version, ignore this torrent') + logT('search', 'replication selected 
torrent now works only with 0.19.0 version, ignore this torrent') return } @@ -186,7 +186,7 @@ module.exports = async ({ }); if(torrent.good != good || torrent.bad != bad) { - console.log('finded new rating on', torrent.name, 'update votes to it') + logT('rating', 'finded new rating on', torrent.name, 'update votes to it') torrent.good = good torrent.bad = bad updateTorrentToDB(torrent) @@ -204,7 +204,7 @@ module.exports = async ({ if(config.p2pReplicationServer) { - console.log('p2p replication server enabled') + logT('replication', 'p2p replication server enabled') p2p.on('randomTorrents', (nil, callback) => { if(typeof callback != 'function') @@ -257,14 +257,14 @@ module.exports = async ({ if(compareVersions(address.version, '0.19.0') < 0) { - console.log('replication now works only with 0.19.0 version, ignore this torrent') + logT('replication', 'replication now works only with 0.19.0 version, ignore this torrent') return } gotTorrents += torrents.length torrents.forEach((torrent) => { - console.log('replicate remote torrent', torrent && torrent.name) + logT('replication', 'replicate remote torrent', torrent && torrent.name) insertTorrentToDB(torrent) }) }) @@ -272,7 +272,7 @@ module.exports = async ({ setTimeout(() => getReplicationTorrents(gotTorrents > 8 ? gotTorrents * 600 : 10000), nextTimeout) } // start - console.log('replication client is enabled') + logT('replication', 'replication client is enabled') getReplicationTorrents() } } @@ -347,13 +347,13 @@ module.exports = async ({ const isSHA1 = isSH1Hash(text) sphinx.query('SELECT * FROM `torrents` WHERE ' + (isSHA1 ? 'hash = ?' : 'MATCH(?)') + ' ' + where + ' ' + order + ' LIMIT ?,?', args, function (error, rows, fields) { if(!rows) { - console.log(error) + logT('search', error) callback(undefined) return; } if(rows.length === 0 && isSHA1 && !isP2P) // trying to get via dht { - console.log('get torrent via infohash with dht') + logT('search', 'get torrent via infohash with dht') torrentClient.getMetadata(text, (torrent) => { searchList.push(baseRowData(torrent)); callback(searchList); @@ -372,7 +372,7 @@ module.exports = async ({ recive('searchTorrent', mergeTorrentsWithDownloadsFn((text, navigation, callback) => { searchTorrentCall(text, navigation, callback) p2p.emit('searchTorrent', {text, navigation}, (remote, socketObject) => { - console.log('remote search results', remote && remote.length) + logT('search', 'remote search results', remote && remote.length) if(remote && remote.length > 0) { const { _socket: socket } = socketObject @@ -447,7 +447,7 @@ module.exports = async ({ //sphinx.query('SELECT * FROM `files` inner join torrents on(torrents.hash = files.hash) WHERE files.path like \'%' + text + '%\' ' + where + ' ' + order + ' LIMIT ?,?', args, function (error, rows, fields) { sphinx.query('SELECT * FROM `files` WHERE MATCH(?) 
' + where + ' ' + order + ' LIMIT ?,?', args, function (error, files, fields) { if(!files) { - console.log(error) + logT('search', error) callback(undefined) return; } @@ -467,7 +467,7 @@ module.exports = async ({ const inSql = Object.keys(search).map(hash => sphinx.escape(hash)).join(','); sphinx.query(`SELECT * FROM torrents WHERE hash IN(${inSql})`, (err, torrents) => { if(!torrents) { - console.log(err) + logT('search', err) return; } @@ -495,7 +495,7 @@ module.exports = async ({ recive('searchFiles', mergeTorrentsWithDownloadsFn((text, navigation, callback) => { searchFilesCall(text, navigation, callback) p2p.emit('searchFiles', {text, navigation}, (remote, socketObject) => { - console.log('remote search files results', remote && remote.length) + logT('search', 'remote search files results', remote && remote.length) if(remote && remote.length > 0) { const { _socket: socket } = socketObject @@ -571,7 +571,7 @@ module.exports = async ({ { topTorrentsCall(type, navigation, callback) p2p.emit('topTorrents', {type, navigation}, (remote, socketObject) => { - console.log('remote top results', remote && remote.length) + logT('top', 'remote top results', remote && remote.length) if(remote && remote.length > 0) { const { _socket: socket } = socketObject @@ -661,9 +661,9 @@ module.exports = async ({ torrentClient._add = (torrentObject, savePath, callback) => { const magnet = `magnet:?xt=urn:btih:${torrentObject.hash}` - console.log('download', magnet) + logT('downloader', 'download', magnet) if(torrentClient.get(magnet)) { - console.log('aready added') + logT('downloader', 'aready added') if(callback) callback(false) return @@ -684,7 +684,7 @@ module.exports = async ({ } torrent.on('ready', () => { - console.log('start downloading', torrent.infoHash, 'to', torrent.path) + logT('downloader', 'start downloading', torrent.infoHash, 'to', torrent.path) send('downloading', torrent.infoHash) progress(0) // immediately display progress if(torrent._paused) @@ -695,7 +695,7 @@ module.exports = async ({ }) torrent.on('done', () => { - console.log('download done', torrent.infoHash) + logT('downloader', 'download done', torrent.infoHash) progress(0) // update progress // remove torrent if marked if(torrent.removeOnDone) @@ -703,7 +703,7 @@ module.exports = async ({ torrentClient.remove(magnet, (err) => { if(err) { - console.log('download removing error', err) + logT('downloader', 'download removing error', err) return } @@ -728,7 +728,7 @@ module.exports = async ({ //custom api pause torrent._pause = () => { - console.log('pause torrent downloading', torrent.infoHash) + logT('downloader', 'pause torrent downloading', torrent.infoHash) torrent.pause() torrent.wires = []; setTimeout(() => { @@ -746,7 +746,7 @@ module.exports = async ({ } torrent._resume = () => { - console.log('resume torrent downloading', torrent.infoHash) + logT('downloader', 'resume torrent downloading', torrent.infoHash) torrent._restoreWires() torrent.resume() } @@ -776,13 +776,13 @@ module.exports = async ({ const id = torrentClientHashMap[hash] if(!id) { - console.log('cant find torrent for removing', hash) + logT('downloader', 'cant find torrent for removing', hash) return } const torrent = torrentClient.get(id) if(!torrent) { - console.log('no torrent for update founded') + logT('downloader', 'no torrent for update founded') return } @@ -811,7 +811,7 @@ module.exports = async ({ const id = torrentClientHashMap[hash] if(!id) { - console.log('cant find torrent for removing', hash) + logT('downloader', 'cant find torrent for removing', 
hash) if(callback) callback(false) return @@ -820,7 +820,7 @@ module.exports = async ({ torrentClient.remove(id, (err) => { if(err) { - console.log('download removing error', err) + logT('downloader', 'download removing error', err) if(callback) callback(false) return @@ -855,12 +855,12 @@ module.exports = async ({ return removeProtect = true - console.log('checktorrents call') + logT('clean', 'checktorrents call') const toRemove = [] const done = async () => { - console.log('torrents to remove founded', toRemove.length) + logT('clean', 'torrents to remove founded', toRemove.length) if(checkOnly) { callback(toRemove.length) @@ -874,7 +874,7 @@ module.exports = async ({ }) callback(toRemove.length) removeProtect = false - console.log('removed torrents by filter:', toRemove.length) + logT('clean', 'removed torrents by filter:', toRemove.length) } let i = 1 @@ -969,7 +969,7 @@ module.exports = async ({ if(!myself) { - console.log('replicate torrent from store record', torrent.hash) + logT('store', 'replicate torrent from store record', torrent.hash) await insertTorrentToDB(torrent) } @@ -1018,7 +1018,7 @@ module.exports = async ({ if(remoteFeed.feed.length > feed.size() || (remoteFeed.feed.length == feed.size() && remoteFeed.feedDate > feed.feedDate)) { - console.log('replace our feed with remote feed') + logT('feed', 'replace our feed with remote feed') feed.feed = remoteFeed.feed feed.feedDate = remoteFeed.feedDate || 0 // it can be new torrents replicate all diff --git a/src/background/background.js b/src/background/background.js index f4ed799..67e24de 100644 --- a/src/background/background.js +++ b/src/background/background.js @@ -122,7 +122,7 @@ process.on('unhandledRejection', r => console.log('Rejection:', r)); const shouldQuit = app.makeSingleInstance(function(commandLine, workingDirectory) { // Someone tried to run a second instance, we should focus our window. 
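Every call site converted in this patch assumes the global logT(tag, ...args) helper that the series introduces in background.js and server.js: the first argument is a per-module tag, the rest are formatted like console.log arguments. A minimal stand-in consistent with that contract (no colours, stdout only — a sketch, not the project's implementation):

const util = require('util')

// Reproduces only the "[tag] message" shape of logT; the real helper in this
// series also colours the tag and mirrors output to a log file.
global.logT = (tag, ...args) => {
	console.log(`[${tag}] ` + util.format(...args))
}

logT('search', 'remote search results', 12)   // -> [search] remote search results 12
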
- console.log('openned second application, just focus this one') + logT('app', 'openned second application, just focus this one') if (mainWindow) { if (mainWindow.isMinimized()) mainWindow.restore(); @@ -131,7 +131,7 @@ const shouldQuit = app.makeSingleInstance(function(commandLine, workingDirectory }); if (shouldQuit) { - console.log('closed because of second application') + logT('app', 'closed because of second application') app.exit(0); } @@ -141,12 +141,12 @@ log.transports.file.level = false; log.transports.console.level = false; log.transports.console = function(msg) { const text = util.format.apply(util, msg.data); - console.log(text); + logT('updater', text); }; autoUpdater.logger = log; autoUpdater.on('update-downloaded', () => { - console.log('update-downloaded lats quitAndInstall'); + logT('updater', 'update-downloaded lats quitAndInstall'); if (env.name === "production") { dialog.showMessageBox({ type: 'info', @@ -240,7 +240,7 @@ app.on("ready", () => { checkInternet(enabled => { if(!enabled) { - console.log('no internet connection were founded, updater not started') + logT('updater', 'no internet connection were founded, updater not started') return } @@ -249,7 +249,7 @@ app.on("ready", () => { autoUpdater.getUpdateInfo().then(info => { if(info.version == app.getVersion()) { - console.log('update not founded for version', app.getVersion()) + logT('updater', 'update not founded for version', app.getVersion()) return } From 9981c9706536a1c714affe4f73c574f27f978f96 Mon Sep 17 00:00:00 2001 From: Alexey Kasyanchuk Date: Tue, 7 Aug 2018 02:41:28 +0300 Subject: [PATCH 18/25] fix(log): color log (part 3) --- src/background/bt/spider.js | 2 +- src/background/bt/udp-tracker-request.js | 8 ++-- src/background/dbPatcher.js | 52 ++++++++++++------------ src/background/mysql.js | 2 +- src/background/torrentClient.js | 12 +++--- 5 files changed, 38 insertions(+), 38 deletions(-) diff --git a/src/background/bt/spider.js b/src/background/bt/spider.js index 40ff4c4..4c1c09b 100644 --- a/src/background/bt/spider.js +++ b/src/background/bt/spider.js @@ -285,7 +285,7 @@ class Spider extends Emiter { this.udp = dgram.createSocket('udp4') this.udp.bind(port) this.udp.on('listening', () => { - console.log(`Listen DHT protocol on ${this.udp.address().address}:${this.udp.address().port}`) + logT('spider', `Listen DHT protocol on ${this.udp.address().address}:${this.udp.address().port}`) }) this.udp.on('message', (data, addr) => { this.parse(data, addr) diff --git a/src/background/bt/udp-tracker-request.js b/src/background/bt/udp-tracker-request.js index 1c247b8..3a1af26 100644 --- a/src/background/bt/udp-tracker-request.js +++ b/src/background/bt/udp-tracker-request.js @@ -15,7 +15,7 @@ const requests = {}; let message = function (buf, host, port) { server.send(buf, 0, buf.length, port, host, function(err, bytes) { if (err) { - console.log(err.message); + logT('udp-tracker', err.message); } }); }; @@ -69,7 +69,7 @@ let scrapeTorrent = function (connectionIdHigh, connectionIdLow, transactionId) message(buffer, connection.host, connection.port); } catch(error) { - console.log('ERROR on scrape', error) + logT('udp-tracker', 'ERROR on scrape', error) } }; @@ -112,7 +112,7 @@ server.on("message", function (msg, rinfo) { delete requests[transactionId]; } else if (action === ACTION_ERROR) { delete requests[transactionId]; - console.log("error in scrape response"); + logT('udp-tracker', "error in scrape response"); } }); @@ -125,7 +125,7 @@ let getPeersStatistic = (host, port, hash, callback) => { 
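The tracker module above follows the BEP 15 UDP tracker protocol: a connect exchange yields a 64-bit connection id, after which announce and scrape packets are matched to their responses by transaction id (the requests map and ACTION_* constants seen here). For reference, the initial 16-byte connect request can be built like this — an illustrative sketch, not code from this repository:

const crypto = require('crypto')

// BEP 15 connect request: magic protocol id 0x41727101980, action 0 (connect),
// and a random 4-byte transaction id that the tracker echoes back.
const buildConnectRequest = () => {
	const buf = Buffer.alloc(16)
	buf.writeUInt32BE(0x417, 0)           // high half of the magic constant
	buf.writeUInt32BE(0x27101980, 4)      // low half of the magic constant
	buf.writeUInt32BE(0, 8)               // action 0 = connect
	const transactionId = crypto.randomBytes(4).readUInt32BE(0)
	buf.writeUInt32BE(transactionId, 12)  // used to match the UDP response
	return { buf, transactionId }
}
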
server.on("listening", function () { var address = server.address(); - console.log("listening udp tracker respose on " + address.address + ":" + address.port); + logT('udp-tracker', "listening udp tracker respose on " + address.address + ":" + address.port); }); server.bind(config.udpTrackersPort); diff --git a/src/background/dbPatcher.js b/src/background/dbPatcher.js index 75c0188..867a83a 100644 --- a/src/background/dbPatcher.js +++ b/src/background/dbPatcher.js @@ -103,12 +103,12 @@ module.exports = async (callback, mainWindow, sphinxApp) => { } const patch = async (version) => { - console.log('db version', version) + logT('patcher', 'db version', version) switch(version) { case 1: { - console.log('patch db to version 2') + logT('patcher', 'patch db to version 2') openPatchWindow() let i = 1 @@ -116,7 +116,7 @@ module.exports = async (callback, mainWindow, sphinxApp) => { const files = (await sphinx.query("SELECT COUNT(*) AS c FROM files"))[0].c await forBigTable(sphinx, 'torrents', async (torrent) => { - console.log('update index', torrent.id, torrent.name, '[', i, 'of', torrents, ']') + logT('patcher', 'update index', torrent.id, torrent.name, '[', i, 'of', torrents, ']') if(patchWindow) patchWindow.webContents.send('reindex', {field: torrent.name, index: i++, all: torrents, torrent: true}) @@ -126,7 +126,7 @@ module.exports = async (callback, mainWindow, sphinxApp) => { }) i = 1 await forBigTable(sphinx, 'files', async (file) => { - console.log('update index', file.id, file.path, '[', i, 'of', files, ']') + logT('patcher', 'update index', file.id, file.path, '[', i, 'of', files, ']') if(patchWindow) patchWindow.webContents.send('reindex', {field: file.path, index: i++, all: files}) @@ -141,13 +141,13 @@ module.exports = async (callback, mainWindow, sphinxApp) => { { openPatchWindow() - console.log('optimizing torrents') + logT('patcher', 'optimizing torrents') if(patchWindow) patchWindow.webContents.send('optimize', {field: 'torrents'}) sphinx.query(`OPTIMIZE INDEX torrents`) await sphinxApp.waitOptimized('torrents') - console.log('optimizing files') + logT('patcher', 'optimizing files') if(patchWindow) patchWindow.webContents.send('optimize', {field: 'files'}) sphinx.query(`OPTIMIZE INDEX files`) @@ -165,7 +165,7 @@ module.exports = async (callback, mainWindow, sphinxApp) => { let i = 1 const torrents = (await sphinx.query("SELECT COUNT(*) AS c FROM torrents"))[0].c await forBigTable(sphinx, 'torrents', async (torrent) => { - console.log('update index', torrent.id, torrent.name, '[', i, 'of', torrents, '] - delete:', bad) + logT('patcher', 'update index', torrent.id, torrent.name, '[', i, 'of', torrents, '] - delete:', bad) if(patchWindow) patchWindow.webContents.send('reindex', {field: torrent.name, index: i++, all: torrents, torrent: true}) @@ -177,7 +177,7 @@ module.exports = async (callback, mainWindow, sphinxApp) => { torrentTypeDetect(torrent, torrent.filesList) if(torrent.contentType == 'bad') { - console.log('remove bad torrent', torrent.name) + logT('patcher', 'remove bad torrent', torrent.name) bad++ await sphinx.query(`DELETE FROM torrents WHERE hash = '${torrent.hash}'`) await sphinx.query(`DELETE FROM files WHERE hash = '${torrent.hash}'`) @@ -185,7 +185,7 @@ module.exports = async (callback, mainWindow, sphinxApp) => { } }) - console.log('removed', bad, 'torrents') + logT('patcher', 'removed', bad, 'torrents') await setVersion(4) } @@ -200,7 +200,7 @@ module.exports = async (callback, mainWindow, sphinxApp) => { let patch = 1 await forBigTable(sphinx, 'torrents', async 
(torrent) => { - console.log('remember index', torrent.id, torrent.name, '[', i, 'of', torrents, ']') + logT('patcher', 'remember index', torrent.id, torrent.name, '[', i, 'of', torrents, ']') if(patchWindow) patchWindow.webContents.send('reindex', {field: torrent.name, index: i++, all: torrents, torrent: true}) @@ -209,7 +209,7 @@ module.exports = async (callback, mainWindow, sphinxApp) => { if(torrentsArray.length >= 20000) { fs.writeFileSync(`${sphinxApp.directoryPath}/torrents.patch.${patch++}`, JSON.stringify(torrentsArray, null, 4), 'utf8'); - console.log('write torrents dump', `${sphinxApp.directoryPath}/torrents.patch.${patch-1}`) + logT('patcher', 'write torrents dump', `${sphinxApp.directoryPath}/torrents.patch.${patch-1}`) torrentsArray = [] } }) @@ -217,7 +217,7 @@ module.exports = async (callback, mainWindow, sphinxApp) => { if(torrentsArray.length > 0) { fs.writeFileSync(`${sphinxApp.directoryPath}/torrents.patch.${patch}`, JSON.stringify(torrentsArray, null, 4), 'utf8'); - console.log('write torrents dump', `${sphinxApp.directoryPath}/torrents.patch.${patch}`) + logT('patcher', 'write torrents dump', `${sphinxApp.directoryPath}/torrents.patch.${patch}`) torrentsArray = [] } else @@ -232,19 +232,19 @@ module.exports = async (callback, mainWindow, sphinxApp) => { sphinxApp.stop(resolve, true) }) - console.log('sphinx stoped for patching') + logT('patcher', 'sphinx stoped for patching') await new Promise((resolve) => { glob(`${sphinxApp.directoryPathDb}/torrents.*`, function (er, files) { files.forEach(file => { - console.log('clear torrents file', file) + logT('patcher', 'clear torrents file', file) fs.unlinkSync(path.resolve(file)) }) resolve() }) }) - console.log('cleaned torrents db structure, rectreating again') + logT('patcher', 'cleaned torrents db structure, rectreating again') i = 1 await new Promise((resolve) => { // reopen sphinx @@ -254,14 +254,14 @@ module.exports = async (callback, mainWindow, sphinxApp) => { }) // same args }) - console.log('sphinx restarted, patch db now') + logT('patcher', 'sphinx restarted, patch db now') for(let k = 1; k <= patch; k++) { torrentsArray = JSON.parse(fs.readFileSync(`${sphinxApp.directoryPath}/torrents.patch.${k}`, 'utf8')) - console.log('read torrents dump', `${sphinxApp.directoryPath}/torrents.patch.${k}`) + logT('patcher', 'read torrents dump', `${sphinxApp.directoryPath}/torrents.patch.${k}`) await asyncForEach(torrentsArray, async (torrent) => { - console.log('update index', torrent.id, torrent.name, '[', i, 'of', torrents, ']') + logT('patcher', 'update index', torrent.id, torrent.name, '[', i, 'of', torrents, ']') if(patchWindow) patchWindow.webContents.send('reindex', {field: torrent.name, index: i++, all: torrents, torrent: true}) @@ -274,7 +274,7 @@ module.exports = async (callback, mainWindow, sphinxApp) => { await new Promise((resolve) => { glob(`${sphinxApp.directoryPath}/torrents.patch.*`, function (er, files) { files.forEach(file => { - console.log('clear dump file', file) + logT('patcher', 'clear dump file', file) fs.unlinkSync(path.resolve(file)) }) resolve() @@ -283,7 +283,7 @@ module.exports = async (callback, mainWindow, sphinxApp) => { torrentsArray = null - console.log('optimizing torrents') + logT('patcher', 'optimizing torrents') if(patchWindow) patchWindow.webContents.send('optimize', {field: 'torrents'}) sphinx.query(`OPTIMIZE INDEX torrents`) @@ -292,7 +292,7 @@ module.exports = async (callback, mainWindow, sphinxApp) => { await setVersion(5) } } - console.log('db patch done') + logT('patcher', 'db 
patch done') sphinx.destroy() if(patchWindow) { @@ -306,14 +306,14 @@ module.exports = async (callback, mainWindow, sphinxApp) => { // init of db, we can set version to last if(sphinxApp && sphinxApp.isInitDb) { - console.log('new db, set version to last version', currentVersion) + logT('patcher', 'new db, set version to last version', currentVersion) await setVersion(currentVersion) } sphinx.query('select * from version', async (err, version) => { if(err) { - console.log('error on version get on db patch') + logT('patcher', 'error on version get on db patch') return } @@ -324,17 +324,17 @@ module.exports = async (callback, mainWindow, sphinxApp) => { const ver = parseInt(fs.readFileSync(`${sphinxApp.directoryPath}/version.vrs`)) if(ver > 0) { - console.log('readed version from version.vrs', ver) + logT('patcher', 'readed version from version.vrs', ver) patch(ver) } else { - console.log('error: bad version in version.vrs') + logT('patcher', 'error: bad version in version.vrs') } } else { - console.log('version not founded, set db version to 1') + logT('patcher', 'version not founded, set db version to 1') await setVersion(1) patch(1) } diff --git a/src/background/mysql.js b/src/background/mysql.js index f5b88ff..bf01791 100644 --- a/src/background/mysql.js +++ b/src/background/mysql.js @@ -156,7 +156,7 @@ const single = (callback) => { mysqlSingle._mysql.connect((mysqlError) => { if (mysqlError) { - console.error('error connecting: ' + mysqlError.stack); + logT('sql', 'error connecting: ' + mysqlError.stack); return; } diff --git a/src/background/torrentClient.js b/src/background/torrentClient.js index 6cb22a1..8ebed07 100644 --- a/src/background/torrentClient.js +++ b/src/background/torrentClient.js @@ -17,7 +17,7 @@ torrentClient.saveSession = (sessionFile) => { torrentClient.loadSession = (sessionFile) => { if(!fs.existsSync(sessionFile)) { - console.log('no download sessions - ignore') + logT('downloader', 'no download sessions - ignore') return } @@ -25,26 +25,26 @@ torrentClient.loadSession = (sessionFile) => { const obj = JSON.parse(data); if(!obj.torrents) { - console.log('no torrents list for loading session') + logT('downloader', 'no torrents list for loading session') return } if(!torrentClient._add) { - console.log('no overriden _add() method') + logT('downloader', 'no overriden _add() method') return } const {torrents} = obj torrents.forEach(({torrent, infoHash, path, removeOnDone, paused}) => { if(!torrent || !infoHash || !path) { - console.log('no info for starting download this torrent') + logT('downloader', 'no info for starting download this torrent') return } - console.log('restore download session:', torrent.name) + logT('downloader', 'restore download session:', torrent.name) const download = torrentClient._add(torrent, path) if(download) { - console.log('restore options') + logT('downloader', 'restore options') // restore options download.removeOnDone = removeOnDone if(paused) From 725632e70950009c66d834cc62cb26df0858e9ec Mon Sep 17 00:00:00 2001 From: Alexey Kasyanchuk Date: Tue, 7 Aug 2018 02:48:36 +0300 Subject: [PATCH 19/25] fix(server): color log server fix --- src/background/server.js | 40 ++++++++++++++++++++++++++++++---------- 1 file changed, 30 insertions(+), 10 deletions(-) diff --git a/src/background/server.js b/src/background/server.js index ff66247..a1de404 100644 --- a/src/background/server.js +++ b/src/background/server.js @@ -14,21 +14,41 @@ const os = require('os') const packageJson = JSON.parse(fs.readFileSync('package.json', 'utf8')); +const util = 
require('util'); +const colors = require('ansi-256-colors'); +const stringHashCode = (str) => { + let hash = 0, i, chr; + if (str.length === 0) + return hash; + for (i = 0; i < str.length; i++) { + chr = str.charCodeAt(i); + hash = ((hash << 5) - hash) + chr; + hash |= 0; // Convert to 32bit integer + } + return hash; +}; + +global.logT = (type, ...d) => { + console.log(colors.fg.codes[Math.abs(stringHashCode(type)) % 256] + `[${type}]` + colors.reset + ' ' + util.format(...d)); +} + + server.listen(appConfig.httpPort); -console.log('Listening web server on', appConfig.httpPort, 'port') -console.log('Platform:', os.platform()) -console.log('Arch:', os.arch()) -console.log('OS Release:', os.release()) -console.log('CPU:', os.cpus()[0].model) -console.log('CPU Logic cores:', os.cpus().length) -console.log('Total memory:', (os.totalmem() / (1024 * 1024)).toFixed(2), 'MB') -console.log('Free memory:', (os.freemem() / (1024 * 1024)).toFixed(2), 'MB') -console.log('NodeJS:', process.version) +logT('system', 'Rats v' + packageJson.version) +logT('system', 'Listening web server on', appConfig.httpPort, 'port') +logT('system', 'Platform:', os.platform()) +logT('system', 'Arch:', os.arch()) +logT('system', 'OS Release:', os.release()) +logT('system', 'CPU:', os.cpus()[0].model) +logT('system', 'CPU Logic cores:', os.cpus().length) +logT('system', 'Total memory:', (os.totalmem() / (1024 * 1024)).toFixed(2), 'MB') +logT('system', 'Free memory:', (os.freemem() / (1024 * 1024)).toFixed(2), 'MB') +logT('system', 'NodeJS:', process.version) const majorVersion = /v?([0-9]+)\.?([0-9]+)?\.?([0-9]+)?\.?([0-9]+)?/.exec(process.version)[1] if(majorVersion < 8) { - console.log('Minumum Node.JS version >= 8.0.0, please update and try again') + logT('system', 'Minumum Node.JS version >= 8.0.0, please update and try again') process.exit(1); } From 297baac3d335a618460d082be1e166e04edccd7a Mon Sep 17 00:00:00 2001 From: Alexey Kasyanchuk Date: Tue, 7 Aug 2018 17:21:46 +0300 Subject: [PATCH 20/25] fix(db): under mac and linux using alternative pool mechanism this must fix test brokeup and closing stub --- src/background/mysql.js | 72 ++++++++++++++++++++++++++++++++++----- src/background/spider.js | 2 +- tests/bigtableapi.test.js | 8 +++-- tests/init.js | 1 + tests/sphinx.test.js | 34 ++++++++++++------ 5 files changed, 96 insertions(+), 21 deletions(-) diff --git a/src/background/mysql.js b/src/background/mysql.js index bf01791..40c3053 100644 --- a/src/background/mysql.js +++ b/src/background/mysql.js @@ -113,14 +113,70 @@ const expand = (sphinx) => { return sphinx } -const pool = () => { - let sphinx = mysql.createPool({ - // bug under mac with some problems on big connection size, limit this to very low value on mac os x - connectionLimit: process.platform === 'darwin' ? 3 : config.sphinx.connectionLimit, - host : config.sphinx.host, - port : config.sphinx.port - }); - return expand(sphinx) +const pool = async () => { + if(/^win/.test(process.platform)) + { + logT('sql', 'using main pool mechanism') + let sphinx = mysql.createPool({ + // bug under mac with some problems on big connection size, limit this to very low value on mac os x + connectionLimit: process.platform === 'darwin' ? 
3 : config.sphinx.connectionLimit, + host : config.sphinx.host, + port : config.sphinx.port + }); + sphinx = expand(sphinx) + const end = sphinx.end.bind(sphinx) + sphinx.end = async (cb) => new Promise(resolve => end(() => { + resolve() + if(cb) cb() + })) + return sphinx + } + else + { + logT('sql', 'using alternative pool mechanism') + let connectionPool = [] + let connectionsLimit = config.sphinx.connectionLimit + let currentConnection = 0 + for(let i = 0; i < connectionsLimit; i++) + { + connectionPool[i] = await single().waitConnection() + } + const buildPoolMethod = (name, ...args) => { + if(!connectionPool) + return + + const data = connectionPool[currentConnection][name](...args) + currentConnection = (currentConnection + 1) % connectionsLimit + return data + } + return new Proxy({ + query(...args) { + return buildPoolMethod('query', ...args) + }, + insertValues(...args) { + return buildPoolMethod('insertValues', ...args) + }, + updateValues(...args) { + return buildPoolMethod('updateValues', ...args) + }, + async end(cb) + { + await Promise.all(connectionPool.map(conn => new Promise(resolve => conn.end(resolve)))) + if(cb) + cb() + connectionPool = null + } + }, { + get(target, prop) + { + if(!target[prop]) + { + return connectionPool[0][prop] + } + return target[prop] + } + }) + } } const single = (callback) => { diff --git a/src/background/spider.js b/src/background/spider.js index 314119f..65b170b 100644 --- a/src/background/spider.js +++ b/src/background/spider.js @@ -45,7 +45,7 @@ module.exports = function (send, recive, dataDirectory, version, env) let filesId = 1; const events = new EventEmitter - let sphinx = pool(); + let sphinx = await pool(); // initialize p2p const p2p = new P2PServer(send) diff --git a/tests/bigtableapi.test.js b/tests/bigtableapi.test.js index ef17a7c..46456d2 100644 --- a/tests/bigtableapi.test.js +++ b/tests/bigtableapi.test.js @@ -8,8 +8,8 @@ const forBigTable = require('../src/background/forBigTable') describe("big table for check", () => { let sphinx; - it("init", function() { - sphinx = pool() + it("init", async function() { + sphinx = await pool() expect(sphinx) }) @@ -36,4 +36,8 @@ describe("big table for check", () => { await forBigTable(sphinx, 'feed', record => records.push(record), null, 15) expect(records.length === 13) }) + + it("close", async function() { + await sphinx.end() + }) }); diff --git a/tests/init.js b/tests/init.js index fefa84c..c623163 100644 --- a/tests/init.js +++ b/tests/init.js @@ -1,4 +1,5 @@ import {startApplication, stopApplication} from "../tests/application"; +global.logT = (...args) => {console.log(...args)} describe("application", () => { before(startApplication); diff --git a/tests/sphinx.test.js b/tests/sphinx.test.js index cd90c33..2f28bd0 100644 --- a/tests/sphinx.test.js +++ b/tests/sphinx.test.js @@ -1,4 +1,4 @@ -import { expect } from "chai"; +import { expect, assert } from "chai"; const mysql = require('mysql') const config = require('../src/background/config') @@ -60,15 +60,29 @@ describe("sphinx", () => { }) }) - it("query limit", function(done) { - const sphinx = pool() - let promises = [] - sphinx.query(`delete from feed where id >= 0`, () => { - for(let i = 0; i < 500; i++) - promises.push(sphinx.query(`insert into feed(id, data) values(${i}, 'a')`)) - Promise.all(promises).then(() => { - sphinx.query(`delete from feed where id >= 0`, () => done()) + it("query limit", function(done) { + const test = async () => { + const sphinx = await pool() + let promises = [] + sphinx.query(`delete from feed 
where id >= 0`, () => { + for(let i = 0; i < 500; i++) + promises.push(sphinx.query(`insert into feed(id, data) values(${i}, 'a')`)) + Promise.all(promises).then(() => { + sphinx.query(`delete from feed where id >= 0`, async () => { + await sphinx.end() + done() + }) + }) }) - }) + } + test() + }) + + it("escape", function () { + assert.equal(sphinx.escape(`naru'to`), `'naru\\'to'`) + }) + + it("close pool", function(done) { + sphinx.end(done) }) }); From ddfd43498ddacfbf75fd349e30afec518781146e Mon Sep 17 00:00:00 2001 From: Alexey Kasyanchuk Date: Tue, 7 Aug 2018 22:54:36 +0300 Subject: [PATCH 21/25] feat(log): error display with separate color #31 --- src/background/api.js | 4 ++-- src/background/background.js | 8 +++++++- src/background/dbPatcher.js | 2 +- src/background/forBigTable.js | 2 +- src/background/mysql.js | 4 ++-- src/background/server.js | 6 +++++- src/background/spider.js | 13 +++++++------ src/background/store.js | 2 +- tests/init.js | 1 + 9 files changed, 27 insertions(+), 15 deletions(-) diff --git a/src/background/api.js b/src/background/api.js index 2aa6cfb..fb6f715 100644 --- a/src/background/api.js +++ b/src/background/api.js @@ -106,7 +106,7 @@ module.exports = async ({ sphinx.query('SELECT count(*) AS torrents, sum(size) AS sz FROM `torrents`', function (error, rows, fields) { if(!rows) { - console.error(error) + logTE('statistic', error) callback(undefined) return; } @@ -115,7 +115,7 @@ module.exports = async ({ sphinx.query('SELECT count(*) AS files FROM `files`', function (error, rows, fields) { if(!rows) { - console.error(error) + logTE('statistic', error) callback(undefined) return; } diff --git a/src/background/background.js b/src/background/background.js index 67e24de..16e64c0 100644 --- a/src/background/background.js +++ b/src/background/background.js @@ -103,6 +103,12 @@ global.logT = (type, ...d) => { logStdout.write(colors.fg.codes[Math.abs(stringHashCode(type)) % 256] + `[${type}]` + colors.reset + ' ' + util.format(...d) + '\n'); } +global.logTE = (type, ...d) => { + const date = (new Date).toLocaleTimeString() + logFile.write(`\n[${date}] [ERROR] [${type}] ` + util.format(...d) + '\n\n'); + logStdout.write(colors.fg.codes[Math.abs(stringHashCode(type)) % 256] + `[${type}]` + colors.reset + ' ' + colors.fg.codes[9] + util.format(...d) + colors.reset + '\n'); +} + // print os info logT('system', 'Rats', app.getVersion()) logT('system', 'Platform:', os.platform()) @@ -118,7 +124,7 @@ if(portative) logT('system', 'portative compability') // handle promise rejections -process.on('unhandledRejection', r => console.log('Rejection:', r)); +process.on('unhandledRejection', r => logTE('system', 'Rejection:', r)); const shouldQuit = app.makeSingleInstance(function(commandLine, workingDirectory) { // Someone tried to run a second instance, we should focus our window. 
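Both helpers pick the tag colour deterministically — colors.fg.codes[Math.abs(stringHashCode(tag)) % 256] — so every subsystem keeps a stable colour across runs, and logTE additionally wraps the message itself in palette entry 9 (bright red). A self-contained check of that mapping, assuming the same ansi-256-colors package used above:

const colors = require('ansi-256-colors')

// Equivalent to stringHashCode in server.js: a 32-bit rolling hash of the tag.
const stringHashCode = (str) => {
	let hash = 0
	for (let i = 0; i < str.length; i++) {
		hash = ((hash << 5) - hash) + str.charCodeAt(i)
		hash |= 0   // keep it a 32-bit integer
	}
	return hash
}

const tag = 'patcher'
const code = Math.abs(stringHashCode(tag)) % 256
// 'patcher' maps to the same one of the 256 foreground codes on every run.
console.log(colors.fg.codes[code] + `[${tag}]` + colors.reset + ' colour index', code)
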
diff --git a/src/background/dbPatcher.js b/src/background/dbPatcher.js index 867a83a..b1eb544 100644 --- a/src/background/dbPatcher.js +++ b/src/background/dbPatcher.js @@ -313,7 +313,7 @@ module.exports = async (callback, mainWindow, sphinxApp) => { sphinx.query('select * from version', async (err, version) => { if(err) { - logT('patcher', 'error on version get on db patch') + logTE('patcher', 'error on version get on db patch') return } diff --git a/src/background/forBigTable.js b/src/background/forBigTable.js index 8da4114..48ccbbc 100644 --- a/src/background/forBigTable.js +++ b/src/background/forBigTable.js @@ -3,7 +3,7 @@ module.exports = (sphinx, table, callback, doneCallback, max = 1000, where = '') sphinx.query(`SELECT * FROM ${table} WHERE id > ${index} ${where} LIMIT ${max}`, (err, torrents) => { const finish = () => { if(err) - logT('sql', 'big table parse error', err) + logTE('sql', 'big table parse error', err) if(doneCallback) doneCallback(true) done(true) diff --git a/src/background/mysql.js b/src/background/mysql.js index 40c3053..da6ac72 100644 --- a/src/background/mysql.js +++ b/src/background/mysql.js @@ -223,12 +223,12 @@ const single = (callback) => { }); mysqlSingle._mysql.on('error', (err) => { - logT('sql', 'db error', err); if(err.code === 'PROTOCOL_CONNECTION_LOST') { // Connection to the MySQL server is usually - logT('sql', 'restart single sql connection') + logT('sql', 'lost connection, restart single sql connection') mysqlSingle._mysql = undefined start(); // lost due to either server restart, or a } else { // connnection idle timeout (the wait_timeout + logTE('sql', 'db error', err); throw err; // server variable configures this) } }); diff --git a/src/background/server.js b/src/background/server.js index a1de404..2aa8e23 100644 --- a/src/background/server.js +++ b/src/background/server.js @@ -32,6 +32,10 @@ global.logT = (type, ...d) => { console.log(colors.fg.codes[Math.abs(stringHashCode(type)) % 256] + `[${type}]` + colors.reset + ' ' + util.format(...d)); } +global.logTE = (type, ...d) => { + console.log(colors.fg.codes[Math.abs(stringHashCode(type)) % 256] + `[${type}]` + colors.reset + ' ' + colors.fg.codes[9] + util.format(...d) + colors.reset + '\n'); +} + server.listen(appConfig.httpPort); logT('system', 'Rats v' + packageJson.version) @@ -48,7 +52,7 @@ logT('system', 'NodeJS:', process.version) const majorVersion = /v?([0-9]+)\.?([0-9]+)?\.?([0-9]+)?\.?([0-9]+)?/.exec(process.version)[1] if(majorVersion < 8) { - logT('system', 'Minumum Node.JS version >= 8.0.0, please update and try again') + logTE('system', 'Minumum Node.JS version >= 8.0.0, please update and try again') process.exit(1); } diff --git a/src/background/spider.js b/src/background/spider.js index 65b170b..7894936 100644 --- a/src/background/spider.js +++ b/src/background/spider.js @@ -129,7 +129,7 @@ module.exports = function (send, recive, dataDirectory, version, env) resolve(data.length > 0 && JSON.parse(data)) }); }).on("error", (err) => { - logT('http', `${url} error: ` + err.message) + logTE('http', `${url} error: ` + err.message) resolve(false) }); }) @@ -225,7 +225,7 @@ module.exports = function (send, recive, dataDirectory, version, env) sphinxSingle.query('UPDATE torrents SET seeders = ?, completed = ?, leechers = ?, trackersChecked = ? 
WHERE hash = ?', [seeders, completed, leechers, Math.floor(checkTime.getTime() / 1000), hash], function(err, result) { if(!result) { - console.error(err); + logTE('udp-tracker', err); return } @@ -423,7 +423,7 @@ module.exports = function (send, recive, dataDirectory, version, env) sphinxSingle.query("SELECT id FROM torrents WHERE hash = ?", torrent.hash, (err, single) => { if(!single) { - logT('add', err) + logTE('add', err) resolve() return } @@ -462,8 +462,7 @@ module.exports = function (send, recive, dataDirectory, version, env) } else { - logT('add', torrent); - console.error(err); + logTE('add', err); } resolve() events.emit('insert', torrent) @@ -475,6 +474,7 @@ module.exports = function (send, recive, dataDirectory, version, env) const {hash} = torrent await sphinxSingle.query('DELETE FROM torrents WHERE hash = ?', hash) await sphinxSingle.query('DELETE FROM files WHERE hash = ?', hash) + logT('remove', 'removed torrent', torrent.name || torrent.hash) } const updateTorrentToDB = async (torrent) => { @@ -491,6 +491,7 @@ module.exports = function (send, recive, dataDirectory, version, env) delete torrent.filesList await sphinxSingle.updateValues('torrents', torrent, {hash: torrent.hash}) + logT('update', 'updated torrent', torrent.name) } const insertMetadata = (metadata, infohash, rinfo) => { @@ -551,7 +552,7 @@ module.exports = function (send, recive, dataDirectory, version, env) { disk.check(rootPath, function(err, info) { if (err) { - logT('quota', err); + logTE('quota', err); } else { const {available, free, total} = info; diff --git a/src/background/store.js b/src/background/store.js index 8d07fa5..1c01f42 100644 --- a/src/background/store.js +++ b/src/background/store.js @@ -145,7 +145,7 @@ module.exports = class P2PStore extends EventEmitter { (err) => { if(err) { - logT('store', err) + logTE('store', err) return } diff --git a/tests/init.js b/tests/init.js index c623163..e911421 100644 --- a/tests/init.js +++ b/tests/init.js @@ -1,5 +1,6 @@ import {startApplication, stopApplication} from "../tests/application"; global.logT = (...args) => {console.log(...args)} +global.logTE = (...args) => {console.log('error', ...args)} describe("application", () => { before(startApplication); From 463eb7cce7c0cad68c4ff06b167c4f81d1e5f950 Mon Sep 17 00:00:00 2001 From: Alexey Kasyanchuk Date: Wed, 8 Aug 2018 00:58:58 +0300 Subject: [PATCH 22/25] fix(closing): fix errors on closing --- src/background/api.js | 4 ---- src/background/mysql.js | 35 +++++++++++++++++++++++++++++------ src/background/spider.js | 17 +++++++++-------- 3 files changed, 38 insertions(+), 18 deletions(-) diff --git a/src/background/api.js b/src/background/api.js index fb6f715..17547e9 100644 --- a/src/background/api.js +++ b/src/background/api.js @@ -210,10 +210,6 @@ module.exports = async ({ if(typeof callback != 'function') return; - // ignore sql requests on closing - if(sphinxSingle.state === 'disconnected') - return - const cpu = cpuUsage() const limit = Math.max(1, 5 - (cpu / 20) | 0) diff --git a/src/background/mysql.js b/src/background/mysql.js index da6ac72..c37e358 100644 --- a/src/background/mysql.js +++ b/src/background/mysql.js @@ -2,7 +2,15 @@ const mysql = require('mysql'); const config = require('./config'); const expand = (sphinx) => { - const queryCall = sphinx.query.bind(sphinx) + const queryOriginal = sphinx.query.bind(sphinx) + const queryCall = (...args) => { + if(sphinx.__closed) + { + logT('sql', 'prevent sql request after end of connection') + return + } + return queryOriginal(...args) + } 
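// The __closed check above is the core of this fix: once end() has run, a
// straggling caller gets a logged no-op instead of an error from the already
// closed MySQL connection. The same guard in isolation (illustrative sketch,
// not this project's API):
const withCloseGuard = (resource) => ({
	query: (...args) => {
		if (resource.__closed) {
			console.log('prevent request after end of connection')
			return
		}
		return resource.query(...args)
	},
	end: async () => {
		resource.__closed = true   // flip the flag first so racing queries are dropped
		await resource.end()
	}
})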
sphinx.query = (sql, args, callback) => new Promise((resolve, reject) => { if(typeof args === 'function' || typeof args === 'undefined') @@ -125,10 +133,13 @@ const pool = async () => { }); sphinx = expand(sphinx) const end = sphinx.end.bind(sphinx) - sphinx.end = async (cb) => new Promise(resolve => end(() => { - resolve() - if(cb) cb() - })) + sphinx.end = (cb) => new Promise(resolve => { + sphinx.__closed = true + end(() => { + resolve() + if(cb) cb() + }) + }) return sphinx } else @@ -161,7 +172,7 @@ const pool = async () => { }, async end(cb) { - await Promise.all(connectionPool.map(conn => new Promise(resolve => conn.end(resolve)))) + await Promise.all(connectionPool.map(conn => conn.end())) if(cb) cb() connectionPool = null @@ -234,6 +245,18 @@ const single = (callback) => { }); mysqlSingle._mysql = expand(mysqlSingle._mysql) + + // fix prevent query after closing + const end = mysqlSingle._mysql.end.bind(mysqlSingle._mysql) + mysqlSingle._mysql.end = (cb) => new Promise(resolve => { + mysqlSingle._mysql.__closed = true + end(() => { + resolve() + if(cb) + cb() + }) + }) + return proxySingle } diff --git a/src/background/spider.js b/src/background/spider.js index 7894936..d50bf38 100644 --- a/src/background/spider.js +++ b/src/background/spider.js @@ -743,7 +743,7 @@ module.exports = function (send, recive, dataDirectory, version, env) upnp.ratsUnmap() logT('close', 'closing alternative db interface') - await new Promise(resolve => sphinxSingleAlternative.end(resolve)) + await sphinxSingleAlternative.end() // save torrents sessions logT('close', 'save torrents downloads sessions') @@ -830,13 +830,14 @@ module.exports = function (send, recive, dataDirectory, version, env) client.removeAllListeners('complete') logT('close', 'closing torrent client') - torrentClient.destroy(() => { - sphinx.end(() => spider.close(() => { - sphinxSingle.destroy() - logT('close', 'spider closed') - callback() - })) - }) + torrentClient.destroy(() => spider.close(async () => { + await sphinx.end() + logT('close', 'pool closed') + await sphinxSingle.end() + logT('close', 'single closed') + logT('close', 'spider closed') + callback() + })) } })() From afa5847e8252c59d75047b42672efdcc44debbf0 Mon Sep 17 00:00:00 2001 From: Alexey Kasyanchuk Date: Wed, 8 Aug 2018 02:01:19 +0300 Subject: [PATCH 23/25] release --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index f8b180d..995732e 100644 --- a/.gitignore +++ b/.gitignore @@ -7,6 +7,8 @@ binlog.* *.pid version.vrs sphinx.conf +*.p2p +downloads.json /dist /temp From f8c67989687aae6ed68c90244f4097a6ee9bef1d Mon Sep 17 00:00:00 2001 From: semantic-release-bot Date: Tue, 7 Aug 2018 23:06:05 +0000 Subject: [PATCH 24/25] chore(release): 0.28.0 [skip ci] # [0.28.0](https://github.com/DEgITx/rats-search/compare/v0.27.0...v0.28.0) (2018-08-07) ### Bug Fixes * **closing:** fix errors on closing ([e04548a](https://github.com/DEgITx/rats-search/commit/e04548a)) * **db:** under mac and linux using alternative pool mechanism ([a3644c0](https://github.com/DEgITx/rats-search/commit/a3644c0)) * **log:** color log (part 2) ([ea8d93e](https://github.com/DEgITx/rats-search/commit/ea8d93e)) * **log:** color log (part 3) ([bc23379](https://github.com/DEgITx/rats-search/commit/bc23379)) * **log:** color messages (part 1) ([27b224d](https://github.com/DEgITx/rats-search/commit/27b224d)) * **server:** color log server fix ([17b377c](https://github.com/DEgITx/rats-search/commit/17b377c)) ### Features * **log:** color log 
([62bbc46](https://github.com/DEgITx/rats-search/commit/62bbc46)) * **log:** error display with separate color [#31](https://github.com/DEgITx/rats-search/issues/31) ([70dd4a3](https://github.com/DEgITx/rats-search/commit/70dd4a3)) ### Performance Improvements * **replication:** replicate number accordion to cpu usage ([6af3b7a](https://github.com/DEgITx/rats-search/commit/6af3b7a)) * **torrents:** ability to disable integrity check on torrents adding torrents [#47](https://github.com/DEgITx/rats-search/issues/47) ([080fc92](https://github.com/DEgITx/rats-search/commit/080fc92)) --- CHANGELOG.md | 24 ++++++++++++++++++++++++ package.json | 2 +- 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index e1ec538..3452182 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,27 @@ +# [0.28.0](https://github.com/DEgITx/rats-search/compare/v0.27.0...v0.28.0) (2018-08-07) + + +### Bug Fixes + +* **closing:** fix errors on closing ([e04548a](https://github.com/DEgITx/rats-search/commit/e04548a)) +* **db:** under mac and linux using alternative pool mechanism ([a3644c0](https://github.com/DEgITx/rats-search/commit/a3644c0)) +* **log:** color log (part 2) ([ea8d93e](https://github.com/DEgITx/rats-search/commit/ea8d93e)) +* **log:** color log (part 3) ([bc23379](https://github.com/DEgITx/rats-search/commit/bc23379)) +* **log:** color messages (part 1) ([27b224d](https://github.com/DEgITx/rats-search/commit/27b224d)) +* **server:** color log server fix ([17b377c](https://github.com/DEgITx/rats-search/commit/17b377c)) + + +### Features + +* **log:** color log ([62bbc46](https://github.com/DEgITx/rats-search/commit/62bbc46)) +* **log:** error display with separate color [#31](https://github.com/DEgITx/rats-search/issues/31) ([70dd4a3](https://github.com/DEgITx/rats-search/commit/70dd4a3)) + + +### Performance Improvements + +* **replication:** replicate number accordion to cpu usage ([6af3b7a](https://github.com/DEgITx/rats-search/commit/6af3b7a)) +* **torrents:** ability to disable integrity check on torrents adding torrents [#47](https://github.com/DEgITx/rats-search/issues/47) ([080fc92](https://github.com/DEgITx/rats-search/commit/080fc92)) + # [0.27.0](https://github.com/DEgITx/rats-search/compare/v0.26.2...v0.27.0) (2018-08-06) diff --git a/package.json b/package.json index 44434e2..af73c9c 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "name": "rats-search", "productName": "Rats on The Boat", "description": "P2P BitTorrent search engine", - "version": "0.27.0", + "version": "0.28.0", "private": true, "author": "Alexey Kasyanchuk ", "copyright": "Copyright © 2018 Alexey Kasyanchuk", From 5cf5bea884c0d22e2d5f66e67e3cd3c8ed8d4fea Mon Sep 17 00:00:00 2001 From: Alexey Kasyanchuk Date: Wed, 8 Aug 2018 04:16:13 +0300 Subject: [PATCH 25/25] fix(ssh): prevent ssh relay startup on exit --- src/background/p2p.js | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/background/p2p.js b/src/background/p2p.js index 7a4c1fc..2bdb32d 100644 --- a/src/background/p2p.js +++ b/src/background/p2p.js @@ -139,6 +139,9 @@ class p2p { checkPortAndRedirect(address, port) { isPortReachable(port, {host: address}).then((isAvailable) => { + if(this.closing) + return // responce can be very late, and ssh can start after closing of program, this will break on linux + this.p2pStatus = isAvailable ? 2 : 0 this.send('p2pStatus', this.p2pStatus) @@ -183,6 +186,7 @@ class p2p { close() { + this.closing = true if(this.ssh) { logT('ssh', 'closing ssh...')
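
Patch 25 closes a shutdown race: the isPortReachable probe resolves asynchronously, so its callback could still spawn the SSH relay after the application had already begun quitting. Reduced to its essence — a sketch assuming the isPortReachable call above comes from the is-port-reachable package; the class here is hypothetical:

const isPortReachable = require('is-port-reachable')

class RelayGuard {
	constructor() { this.closing = false }
	checkPort(address, port, startRelay) {
		isPortReachable(port, { host: address }).then((isAvailable) => {
			if (this.closing)
				return          // late response after close(): never start the relay now
			if (!isAvailable)
				startRelay()    // fall back to the SSH relay only while still running
		})
	}
	close() { this.closing = true }
}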