fix eslint

This commit is contained in:
Alexey Kasyanchuk
2018-06-18 00:06:28 +03:00
parent 93b1b91f56
commit fd4ba2c392
61 changed files with 5528 additions and 5528 deletions

View File

@ -35,17 +35,17 @@ module.exports = async ({
return;
sphinx.query('SELECT * FROM `torrents` ORDER BY added DESC LIMIT 0,10', function (error, rows, fields) {
if(!rows) {
callback(undefined)
return;
}
if(!rows) {
callback(undefined)
return;
}
let torrents = [];
rows.forEach((row) => {
torrents.push(baseRowData(row));
});
let torrents = [];
rows.forEach((row) => {
torrents.push(baseRowData(row));
});
callback(torrents)
callback(torrents)
});
});
@ -55,25 +55,25 @@ module.exports = async ({
return;
sphinx.query('SELECT count(*) AS torrents, sum(size) AS sz FROM `torrents`', function (error, rows, fields) {
if(!rows) {
console.error(error)
callback(undefined)
return;
}
let result = {torrents: rows[0].torrents || 0, size: rows[0].sz || 0}
sphinx.query('SELECT count(*) AS files FROM `files`', function (error, rows, fields) {
if(!rows) {
console.error(error)
callback(undefined)
return;
if(!rows) {
console.error(error)
callback(undefined)
return;
}
result.files = rows[0].files || 0
let result = {torrents: rows[0].torrents || 0, size: rows[0].sz || 0}
callback(result)
})
sphinx.query('SELECT count(*) AS files FROM `files`', function (error, rows, fields) {
if(!rows) {
console.error(error)
callback(undefined)
return;
}
result.files = rows[0].files || 0
callback(result)
})
});
});
@ -112,43 +112,43 @@ module.exports = async ({
}
sphinx.query('SELECT * FROM `torrents` WHERE `hash` = ?', hash, async function (error, rows, fields) {
if(!rows || rows.length == 0) {
callback(undefined)
return;
}
let torrent = rows[0];
if(options.files)
{
torrent.filesList = await sphinx.query('SELECT * FROM `files` WHERE `hash` = ? LIMIT 50000', hash);
callback(baseRowData(torrent))
}
else
{
callback(baseRowData(torrent))
}
if(torrentClientHashMap[hash])
{
const torrent = torrentClient.get(torrentClientHashMap[hash])
if(torrent)
{
send('downloading', torrent.infoHash)
if(!rows || rows.length == 0) {
callback(undefined)
return;
}
}
let torrent = rows[0];
// get votes
const {good, bad, selfVote} = await getVotes(hash)
send('votes', {
hash, good, bad, selfVote
});
if(torrent.good != good || torrent.bad != bad)
{
console.log('finded new rating on', torrent.name, 'update votes to it')
torrent.good = good
torrent.bad = bad
updateTorrentToDB(torrent)
}
if(options.files)
{
torrent.filesList = await sphinx.query('SELECT * FROM `files` WHERE `hash` = ? LIMIT 50000', hash);
callback(baseRowData(torrent))
}
else
{
callback(baseRowData(torrent))
}
if(torrentClientHashMap[hash])
{
const torrent = torrentClient.get(torrentClientHashMap[hash])
if(torrent)
{
send('downloading', torrent.infoHash)
}
}
// get votes
const {good, bad, selfVote} = await getVotes(hash)
send('votes', {
hash, good, bad, selfVote
});
if(torrent.good != good || torrent.bad != bad)
{
console.log('finded new rating on', torrent.name, 'update votes to it')
torrent.good = good
torrent.bad = bad
updateTorrentToDB(torrent)
}
});
}
@ -167,27 +167,27 @@ module.exports = async ({
p2p.on('randomTorrents', (nil, callback) => {
if(typeof callback != 'function')
return;
sphinx.query('SELECT * FROM `torrents` ORDER BY rand() limit 5', (error, torrents) => {
if(!torrents || torrents.length == 0) {
callback(undefined)
return;
}
let hashes = {}
for(const torrent of torrents)
{
delete torrent.id
hashes[torrent.hash] = torrent
}
const inSql = Object.keys(hashes).map(hash => sphinx.escape(hash)).join(',');
sphinx.query(`SELECT * FROM files WHERE hash IN(${inSql}) limit 50000`, (error, files) => {
if(!files)
{
files = []
}
files.forEach((file) => {
if(!hashes[file.hash].filesList)
hashes[file.hash].filesList = []
@ -277,13 +277,13 @@ module.exports = async ({
sphinx.query('SELECT * FROM `torrents` WHERE MATCH(?) ' + where + ' ' + order + ' LIMIT ?,?', args, function (error, rows, fields) {
if(!rows) {
console.log(error)
callback(undefined)
return;
callback(undefined)
return;
}
rows.forEach((row) => {
searchList.push(baseRowData(row));
});
callback(searchList);
});
callback(searchList);
});
}
@ -328,37 +328,37 @@ module.exports = async ({
let where = '';
/*
if(orderBy && orderBy.length > 0)
{
const orderDesc = navigation.orderDesc ? 'DESC' : 'ASC';
args.splice(1, 0, orderBy);
order = 'ORDER BY ?? ' + orderDesc;
}
*/
if(orderBy && orderBy.length > 0)
{
const orderDesc = navigation.orderDesc ? 'DESC' : 'ASC';
args.splice(1, 0, orderBy);
order = 'ORDER BY ?? ' + orderDesc;
}
*/
/*
if(safeSearch)
{
where += " and contentCategory != 'xxx' ";
}
if(navigation.type && navigation.type.length > 0)
{
where += ' and contentType = ' + sphinx.escape(navigation.type) + ' ';
}
if(navigation.size)
{
if(navigation.size.max > 0)
where += ' and torrentSize < ' + sphinx.escape(navigation.size.max) + ' ';
if(navigation.size.min > 0)
where += ' and torrentSize > ' + sphinx.escape(navigation.size.min) + ' ';
}
if(navigation.files)
{
if(navigation.files.max > 0)
where += ' and files < ' + sphinx.escape(navigation.files.max) + ' ';
if(navigation.files.min > 0)
where += ' and files > ' + sphinx.escape(navigation.files.min) + ' ';
}
*/
if(safeSearch)
{
where += " and contentCategory != 'xxx' ";
}
if(navigation.type && navigation.type.length > 0)
{
where += ' and contentType = ' + sphinx.escape(navigation.type) + ' ';
}
if(navigation.size)
{
if(navigation.size.max > 0)
where += ' and torrentSize < ' + sphinx.escape(navigation.size.max) + ' ';
if(navigation.size.min > 0)
where += ' and torrentSize > ' + sphinx.escape(navigation.size.min) + ' ';
}
if(navigation.files)
{
if(navigation.files.max > 0)
where += ' and files < ' + sphinx.escape(navigation.files.max) + ' ';
if(navigation.files.min > 0)
where += ' and files > ' + sphinx.escape(navigation.files.min) + ' ';
}
*/
let search = {};
//args.splice(orderBy && orderBy.length > 0 ? 1 : 0, 1);
@ -366,8 +366,8 @@ module.exports = async ({
sphinx.query('SELECT * FROM `files` WHERE MATCH(?) ' + where + ' ' + order + ' LIMIT ?,?', args, function (error, files, fields) {
if(!files) {
console.log(error)
callback(undefined)
return;
callback(undefined)
return;
}
if(files.length === 0)
{
@ -392,7 +392,7 @@ module.exports = async ({
for(const torrent of torrents)
{
search[torrent.hash] = Object.assign(baseRowData(torrent), search[torrent.hash])
// temporary ignore adult content in search (workaroud)
if(safeSearch && search[torrent.hash].contentCategory == 'xxx')
delete search[torrent.hash]
@ -466,7 +466,7 @@ module.exports = async ({
where += ' and `added` > ' + (Math.floor(Date.now() / 1000) - (60 * 60 * 24 * 30))
}
}
const query = `SELECT * FROM torrents WHERE seeders > 0 and contentCategory != 'xxx' ${where} ORDER BY seeders DESC LIMIT ${index},${limit}`;
if(topCache[query])
{
@ -478,10 +478,10 @@ module.exports = async ({
callback(undefined)
return;
}
rows = rows.map((row) => baseRowData(row));
topCache[query] = rows;
callback(rows);
callback(rows);
});
}
@ -571,7 +571,7 @@ module.exports = async ({
{
spider.announceHashes = []
}
if(typeof callback === 'function')
callback(true)
});
@ -739,9 +739,9 @@ module.exports = async ({
if(!temp || !temp.torrent)
return
const { torrent } = temp
if(torrent.hash !== record.torrentHash)
return
@ -760,7 +760,7 @@ module.exports = async ({
// update feed
if(record.vote !== 'good')
return
feed.add(torrent)
send('feedUpdate', {
feed: feed.feed
@ -787,10 +787,10 @@ module.exports = async ({
p2p.emit('feed', null, (remoteFeed) => {
if(!remoteFeed)
return
if(remoteFeed.length <= feed.size())
return
console.log('replace our feed with remote feed')
feed.feed = remoteFeed
send('feedUpdate', {
@ -799,5 +799,5 @@ module.exports = async ({
});
}, 1000)
})
}

View File

@ -28,17 +28,17 @@ require('electron-context-menu')({})
// Thanks to this you can use production and development versions of the app
// on same machine like those are two separate apps.
if (env.name !== "production") {
const userDataPath = app.getPath("userData");
app.setPath("userData", `${userDataPath} (${env.name})`);
const userDataPath = app.getPath("userData");
app.setPath("userData", `${userDataPath} (${env.name})`);
}
// portative version
let portative = false
if(env.name === "production") {
if(fs.existsSync(path.dirname(process.execPath) + `/data`))
{
portative = true;
app.setPath("userData", path.dirname(process.execPath) + `/data`);
}
if(fs.existsSync(path.dirname(process.execPath) + `/data`))
{
portative = true;
app.setPath("userData", path.dirname(process.execPath) + `/data`);
}
}
const resourcesPath = env.name === "production" ? process.resourcesPath : 'resources'
@ -54,33 +54,33 @@ let sphinx = undefined
let spider = undefined
const setApplicationMenu = () => {
const settingsMenuTemplate = settingsMenuTemplateFunc(appConfig, (lang) => {
// update menu translation
changeLanguage(lang, () => setApplicationMenu())
})
const menus = [editMenuTemplateFunc(), manageMenuTemplateFunc(), settingsMenuTemplate, aboutMenuTemplateFunc()];
const settingsMenuTemplate = settingsMenuTemplateFunc(appConfig, (lang) => {
// update menu translation
changeLanguage(lang, () => setApplicationMenu())
})
const menus = [editMenuTemplateFunc(), manageMenuTemplateFunc(), settingsMenuTemplate, aboutMenuTemplateFunc()];
if (env.name !== "production") {
menus.push(devMenuTemplate);
}
// append version as disabled menu item
menus.push({
label: app.getVersion()
})
Menu.setApplicationMenu(Menu.buildFromTemplate(menus));
if (env.name !== "production") {
menus.push(devMenuTemplate);
}
// append version as disabled menu item
menus.push({
label: app.getVersion()
})
Menu.setApplicationMenu(Menu.buildFromTemplate(menus));
};
const util = require('util');
if (!fs.existsSync(app.getPath("userData"))){
fs.mkdirSync(app.getPath("userData"));
fs.mkdirSync(app.getPath("userData"));
}
const logFile = fs.createWriteStream(app.getPath("userData") + '/rats.log', {flags : 'w'});
const logStdout = process.stdout;
console.log = (...d) => {
const date = (new Date).toLocaleTimeString()
logFile.write(`[${date}] ` + util.format(...d) + '\n');
logStdout.write(util.format(...d) + '\n');
const date = (new Date).toLocaleTimeString()
logFile.write(`[${date}] ` + util.format(...d) + '\n');
logStdout.write(util.format(...d) + '\n');
};
// print os info
@ -94,21 +94,21 @@ console.log('Total memory:', (os.totalmem() / (1024 * 1024)).toFixed(2), 'MB')
console.log('Free memory:', (os.freemem() / (1024 * 1024)).toFixed(2), 'MB')
if(portative)
console.log('portative compability')
console.log('portative compability')
const shouldQuit = app.makeSingleInstance(function(commandLine, workingDirectory) {
// Someone tried to run a second instance, we should focus our window.
console.log('openned second application, just focus this one')
if (mainWindow) {
if (mainWindow.isMinimized())
mainWindow.restore();
mainWindow.focus();
}
// Someone tried to run a second instance, we should focus our window.
console.log('openned second application, just focus this one')
if (mainWindow) {
if (mainWindow.isMinimized())
mainWindow.restore();
mainWindow.focus();
}
});
if (shouldQuit) {
console.log('closed because of second application')
app.exit(0);
console.log('closed because of second application')
app.exit(0);
}
// log autoupdate
@ -116,162 +116,162 @@ const log = require('electron-log')
log.transports.file.level = false;
log.transports.console.level = false;
log.transports.console = function(msg) {
const text = util.format.apply(util, msg.data);
console.log(text);
const text = util.format.apply(util, msg.data);
console.log(text);
};
autoUpdater.logger = log;
autoUpdater.on('update-downloaded', () => {
console.log('update-downloaded lats quitAndInstall');
if (env.name === "production") {
dialog.showMessageBox({
type: 'info',
title: 'Found Updates',
message: 'Found updates, do you want update now?',
buttons: ['Sure', 'No']
}, (buttonIndex) => {
if (buttonIndex === 0) {
const isSilent = true;
const isForceRunAfter = true;
autoUpdater.quitAndInstall(isSilent, isForceRunAfter);
}
})
}
console.log('update-downloaded lats quitAndInstall');
if (env.name === "production") {
dialog.showMessageBox({
type: 'info',
title: 'Found Updates',
message: 'Found updates, do you want update now?',
buttons: ['Sure', 'No']
}, (buttonIndex) => {
if (buttonIndex === 0) {
const isSilent = true;
const isForceRunAfter = true;
autoUpdater.quitAndInstall(isSilent, isForceRunAfter);
}
})
}
})
let tray = undefined
app.on("ready", () => {
sphinx = startSphinx(() => {
sphinx = startSphinx(() => {
mainWindow = createWindow("main", {
width: 1000,
height: 600
});
mainWindow = createWindow("main", {
width: 1000,
height: 600
});
dbPatcher(() => {
changeLanguage(appConfig.language, () => setApplicationMenu())
dbPatcher(() => {
changeLanguage(appConfig.language, () => setApplicationMenu())
mainWindow.loadURL(
url.format({
pathname: path.join(__dirname, "app.html"),
protocol: "file:",
slashes: true
})
);
mainWindow.loadURL(
url.format({
pathname: path.join(__dirname, "app.html"),
protocol: "file:",
slashes: true
})
);
if (env.name === "development") {
mainWindow.openDevTools();
}
if (env.name === "development") {
mainWindow.openDevTools();
}
if(process.platform === 'darwin')
tray = new Tray(`${resourcesPath}/icons/19x19.png`)
else
tray = new Tray(`${resourcesPath}/icons/512x512.png`)
if(process.platform === 'darwin')
tray = new Tray(`${resourcesPath}/icons/19x19.png`)
else
tray = new Tray(`${resourcesPath}/icons/512x512.png`)
tray.on('click', () => {
mainWindow.isVisible() ? mainWindow.hide() : mainWindow.show()
})
mainWindow.on('show', () => {
tray.setHighlightMode('always')
})
mainWindow.on('hide', () => {
tray.setHighlightMode('never')
})
tray.on('click', () => {
mainWindow.isVisible() ? mainWindow.hide() : mainWindow.show()
})
mainWindow.on('show', () => {
tray.setHighlightMode('always')
})
mainWindow.on('hide', () => {
tray.setHighlightMode('never')
})
mainWindow.on('close', (event) => {
if (!app.isQuiting && appConfig.trayOnClose && process.platform !== 'linux') {
event.preventDefault()
mainWindow.hide()
return
}
})
mainWindow.on('closed', () => {
mainWindow = undefined
})
mainWindow.on('close', (event) => {
if (!app.isQuiting && appConfig.trayOnClose && process.platform !== 'linux') {
event.preventDefault()
mainWindow.hide()
return
}
})
mainWindow.on('closed', () => {
mainWindow = undefined
})
mainWindow.on('minimize', (event) => {
if(appConfig.trayOnMinimize)
{
event.preventDefault();
mainWindow.hide();
}
});
mainWindow.on('minimize', (event) => {
if(appConfig.trayOnMinimize)
{
event.preventDefault();
mainWindow.hide();
}
});
var contextMenu = Menu.buildFromTemplate([
{ label: 'Show', click: function(){
mainWindow.show();
} },
{ label: 'Quit', click: function(){
app.isQuiting = true;
if (sphinx)
stop()
else
app.quit()
} }
]);
var contextMenu = Menu.buildFromTemplate([
{ label: 'Show', click: function(){
mainWindow.show();
} },
{ label: 'Quit', click: function(){
app.isQuiting = true;
if (sphinx)
stop()
else
app.quit()
} }
]);
tray.setContextMenu(contextMenu)
tray.setToolTip('Rats on The Boat search')
tray.setContextMenu(contextMenu)
tray.setToolTip('Rats on The Boat search')
mainWindow.webContents.on('will-navigate', e => { e.preventDefault() })
mainWindow.webContents.on('new-window', (event, url, frameName) => {
if(frameName == '_self')
{
event.preventDefault()
mainWindow.loadURL(url)
}
})
mainWindow.webContents.on('will-navigate', e => { e.preventDefault() })
mainWindow.webContents.on('new-window', (event, url, frameName) => {
if(frameName == '_self')
{
event.preventDefault()
mainWindow.loadURL(url)
}
})
if (env.name === "production" && !portative) { autoUpdater.checkForUpdates() }
if (env.name === "production" && !portative) { autoUpdater.checkForUpdates() }
spider = new spiderCall((...data) => {
if(mainWindow)
mainWindow.webContents.send(...data)
}, (message, callback) => {
ipcMain.on(message, (event, arg) => {
if(Array.isArray(arg) && typeof arg[arg.length - 1] === 'object' && arg[arg.length - 1].callback)
{
const id = arg[arg.length - 1].callback
arg[arg.length - 1] = (responce) => {
mainWindow.webContents.send('callback', id, responce)
}
}
callback.apply(null, arg)
})
}, app.getPath("userData"), app.getVersion(), env.name)
}, mainWindow, sphinx)
}, app.getPath("userData"), () => app.quit())
spider = new spiderCall((...data) => {
if(mainWindow)
mainWindow.webContents.send(...data)
}, (message, callback) => {
ipcMain.on(message, (event, arg) => {
if(Array.isArray(arg) && typeof arg[arg.length - 1] === 'object' && arg[arg.length - 1].callback)
{
const id = arg[arg.length - 1].callback
arg[arg.length - 1] = (responce) => {
mainWindow.webContents.send('callback', id, responce)
}
}
callback.apply(null, arg)
})
}, app.getPath("userData"), app.getVersion(), env.name)
}, mainWindow, sphinx)
}, app.getPath("userData"), () => app.quit())
});
let stopProtect = false
const stop = () => {
if(stopProtect)
return
stopProtect = true
if(stopProtect)
return
stopProtect = true
if(tray)
tray.destroy()
if(tray)
tray.destroy()
if(spider)
{
spider.stop(() => sphinx.stop())
}
else
{
sphinx.stop()
}
if(spider)
{
spider.stop(() => sphinx.stop())
}
else
{
sphinx.stop()
}
}
app.on("window-all-closed", () => {
if (sphinx)
stop()
else
app.quit()
if (sphinx)
stop()
else
app.quit()
});
app.on('before-quit', () => {
app.isQuiting = true
if (sphinx)
stop()
app.isQuiting = true
if (sphinx)
stop()
})

View File

@ -11,88 +11,88 @@ const config = require('../config')
class Client extends Emiter
{
constructor(options) {
super();
this.timeout = config.downloader.timeout;
this.maxConnections = config.downloader.maxConnections;
debug('timeout', this.timeout)
debug('maxConnections', this.maxConnections)
this.activeConnections = 0;
this.peers = new PeerQueue(this.maxConnections);
this.on('download', this._download);
constructor(options) {
super();
this.timeout = config.downloader.timeout;
this.maxConnections = config.downloader.maxConnections;
debug('timeout', this.timeout)
debug('maxConnections', this.maxConnections)
this.activeConnections = 0;
this.peers = new PeerQueue(this.maxConnections);
this.on('download', this._download);
// if (typeof options.ignore === 'function') {
// this.ignore = options.ignore;
//}
//else {
this.ignore = function (infohash, rinfo, ignore) {
ignore(false);
};
// }
}
// if (typeof options.ignore === 'function') {
// this.ignore = options.ignore;
//}
//else {
this.ignore = function (infohash, rinfo, ignore) {
ignore(false);
};
// }
}
_next(infohash, successful) {
var req = this.peers.shift(infohash, successful);
if (req) {
this.ignore(req.infohash.toString('hex'), req.rinfo, (drop) => {
if (!drop) {
this.emit('download', req.rinfo, req.infohash);
}
});
}
}
_next(infohash, successful) {
var req = this.peers.shift(infohash, successful);
if (req) {
this.ignore(req.infohash.toString('hex'), req.rinfo, (drop) => {
if (!drop) {
this.emit('download', req.rinfo, req.infohash);
}
});
}
}
_download(rinfo, infohash)
{
debug('start download', infohash.toString('hex'), 'connections', this.activeConnections);
this.activeConnections++;
_download(rinfo, infohash)
{
debug('start download', infohash.toString('hex'), 'connections', this.activeConnections);
this.activeConnections++;
var successful = false;
var socket = new net.Socket();
var successful = false;
var socket = new net.Socket();
socket.setTimeout(this.timeout || 5000);
socket.connect(rinfo.port, rinfo.address, () => {
var wire = new Wire(infohash);
socket.pipe(wire).pipe(socket);
socket.setTimeout(this.timeout || 5000);
socket.connect(rinfo.port, rinfo.address, () => {
var wire = new Wire(infohash);
socket.pipe(wire).pipe(socket);
wire.on('metadata', (metadata, infoHash) => {
successful = true;
debug('successfuly downloader', infoHash, rinfo);
this.emit('complete', metadata, infoHash, rinfo);
socket.destroy();
});
wire.on('metadata', (metadata, infoHash) => {
successful = true;
debug('successfuly downloader', infoHash, rinfo);
this.emit('complete', metadata, infoHash, rinfo);
socket.destroy();
});
wire.on('fail', () => {
socket.destroy();
});
wire.on('fail', () => {
socket.destroy();
});
wire.sendHandshake();
});
wire.sendHandshake();
});
socket.on('error', (err) => {
socket.destroy();
});
socket.on('error', (err) => {
socket.destroy();
});
socket.on('timeout', (err) => {
socket.destroy();
});
socket.on('timeout', (err) => {
socket.destroy();
});
socket.once('close', () => {
this.activeConnections--;
this._next(infohash, successful);
});
}
socket.once('close', () => {
this.activeConnections--;
this._next(infohash, successful);
});
}
add(rinfo, infohash) {
this.peers.push({infohash: infohash, rinfo: rinfo});
if (this.activeConnections < this.maxConnections && this.peers.length() > 0) {
this._next();
}
}
add(rinfo, infohash) {
this.peers.push({infohash: infohash, rinfo: rinfo});
if (this.activeConnections < this.maxConnections && this.peers.length() > 0) {
this._next();
}
}
isIdle() {
return this.peers.length() === 0;
}
isIdle() {
return this.peers.length() === 0;
}
}
module.exports = Client;

View File

@ -8,28 +8,28 @@ let sw = false
const cpuTimer = setInterval(() => {
if(!sw) {
keepTime = process.hrtime();
keepUsage = process.cpuUsage();
sw = true;
keepUsage = process.cpuUsage();
sw = true;
} else {
startTime = keepTime;
startUsage = keepUsage;
sw = false;
}
}
}, 500)
cpuTimer.unref()
module.exports = () => {
function secNSec2ms (secNSec) {
return secNSec[0] * 1000 + secNSec[1] / 1000000
}
function secNSec2ms (secNSec) {
return secNSec[0] * 1000 + secNSec[1] / 1000000
}
var elapTime = process.hrtime(startTime)
var elapUsage = process.cpuUsage(startUsage)
var elapTime = process.hrtime(startTime)
var elapUsage = process.cpuUsage(startUsage)
var elapTimeMS = secNSec2ms(elapTime)
var elapUserMS = elapUsage.user
var elapSystMS = elapUsage.system
var elapTimeMS = secNSec2ms(elapTime)
var elapUserMS = elapUsage.user
var elapSystMS = elapUsage.system
return Math.round(100 * ((elapUserMS + elapSystMS) / 1000) / elapTimeMS)
return Math.round(100 * ((elapUserMS + elapSystMS) / 1000) / elapTimeMS)
}

View File

@ -1,55 +1,55 @@
'use strict';
var PeerQueue = function (maxSize, perLimit) {
this.maxSize = maxSize || 200;
this.perLimit = perLimit || 10;
this.peers = {};
this.reqs = [];
this.maxSize = maxSize || 200;
this.perLimit = perLimit || 10;
this.peers = {};
this.reqs = [];
};
PeerQueue.prototype._shift = function () {
if (this.length() > 0) {
var req = this.reqs.shift();
this.peers[req.infohash.toString('hex')] = [];
return req;
}
if (this.length() > 0) {
var req = this.reqs.shift();
this.peers[req.infohash.toString('hex')] = [];
return req;
}
};
PeerQueue.prototype.push = function (peer) {
var infohashHex = peer.infohash.toString('hex');
var peers = this.peers[infohashHex];
var infohashHex = peer.infohash.toString('hex');
var peers = this.peers[infohashHex];
if (peers && peers.length < this.perLimit) {
peers.push(peer);
}
else if (this.length() < this.maxSize) {
this.reqs.push(peer);
}
if (peers && peers.length < this.perLimit) {
peers.push(peer);
}
else if (this.length() < this.maxSize) {
this.reqs.push(peer);
}
};
PeerQueue.prototype.shift = function (infohash, successful) {
if (infohash) {
var infohashHex = infohash.toString('hex');
if (successful === true) {
delete this.peers[infohashHex];
}
else {
var peers = this.peers[infohashHex];
if (peers) {
if (peers.length > 0) {
return peers.shift();
}
else {
delete this.peers[infohashHex];
}
}
}
}
return this._shift();
if (infohash) {
var infohashHex = infohash.toString('hex');
if (successful === true) {
delete this.peers[infohashHex];
}
else {
var peers = this.peers[infohashHex];
if (peers) {
if (peers.length > 0) {
return peers.shift();
}
else {
delete this.peers[infohashHex];
}
}
}
}
return this._shift();
};
PeerQueue.prototype.length = function () {
return this.reqs.length;
return this.reqs.length;
};
module.exports = PeerQueue;

View File

@ -9,312 +9,312 @@ const config = require('../config')
const fs = require('fs')
const bootstraps = [{
address: 'router.bittorrent.com',
port: 6881
address: 'router.bittorrent.com',
port: 6881
}, {
address: 'router.utorrent.com',
port: 6881
address: 'router.utorrent.com',
port: 6881
}, {
address: 'dht.transmissionbt.com',
port: 6881
address: 'dht.transmissionbt.com',
port: 6881
}, {
address: 'dht.aelitis.com',
port: 6881
address: 'dht.aelitis.com',
port: 6881
}]
function isValidPort(port) {
return port > 0 && port < (1 << 16)
return port > 0 && port < (1 << 16)
}
function generateTid() {
return parseInt(Math.random() * 99).toString()
return parseInt(Math.random() * 99).toString()
}
class Spider extends Emiter {
constructor(client) {
super()
const options = arguments.length? arguments[0]: {}
this.table = new Table(options.tableCaption || 1000)
this.bootstraps = options.bootstraps || bootstraps
this.token = new Token()
this.client = client
this.ignore = false; // ignore all requests
this.initialized = false;
constructor(client) {
super()
const options = arguments.length? arguments[0]: {}
this.table = new Table(options.tableCaption || 1000)
this.bootstraps = options.bootstraps || bootstraps
this.token = new Token()
this.client = client
this.ignore = false; // ignore all requests
this.initialized = false;
this.walkInterval = config.spider.walkInterval;
this.foundSpeed = 0;
this.foundCounter = 0;
setInterval(() => {
this.foundSpeed = this.foundCounter;
this.foundCounter = 0;
}, 1000)
this.walkInterval = config.spider.walkInterval;
this.foundSpeed = 0;
this.foundCounter = 0;
setInterval(() => {
this.foundSpeed = this.foundCounter;
this.foundCounter = 0;
}, 1000)
this.announceHashes = []
}
this.announceHashes = []
}
send(message, address) {
const data = bencode.encode(message)
this.udp.send(data, 0, data.length, address.port, address.address)
}
send(message, address) {
const data = bencode.encode(message)
this.udp.send(data, 0, data.length, address.port, address.address)
}
findNode(id, address) {
const message = {
t: generateTid(),
y: 'q',
q: 'find_node',
a: {
id: id,
target: Node.generateID()
}
}
this.send(message, address)
}
findNode(id, address) {
const message = {
t: generateTid(),
y: 'q',
q: 'find_node',
a: {
id: id,
target: Node.generateID()
}
}
this.send(message, address)
}
getPeersRequest(infoHash, address) {
const message = {
t: generateTid(),
y: 'q',
q: 'get_peers',
a: {
id: this.table.id,
info_hash: infoHash
}
}
this.send(message, address)
}
getPeersRequest(infoHash, address) {
const message = {
t: generateTid(),
y: 'q',
q: 'get_peers',
a: {
id: this.table.id,
info_hash: infoHash
}
}
this.send(message, address)
}
announcePeer(infoHash, token, address, port)
{
const message = {
t: generateTid(),
y: 'q',
q: 'announce_peer',
a: {
id: this.table.id,
token: token,
info_hash: infoHash,
port: port,
implied_port: port ? 0 : 1
}
}
this.send(message, address)
}
announcePeer(infoHash, token, address, port)
{
const message = {
t: generateTid(),
y: 'q',
q: 'announce_peer',
a: {
id: this.table.id,
token: token,
info_hash: infoHash,
port: port,
implied_port: port ? 0 : 1
}
}
this.send(message, address)
}
join() {
this.bootstraps.forEach((bootstrap) => {
this.findNode(this.table.id, bootstrap)
})
}
join() {
this.bootstraps.forEach((bootstrap) => {
this.findNode(this.table.id, bootstrap)
})
}
walk() {
if(this.closing)
return
walk() {
if(this.closing)
return
if(!this.client || this.client.isIdle()) {
if(!this.ignore)
{
const node = this.table.shift()
if (node && (config.spider.nodesUsage === 0 || parseInt(Math.random() * this.table.nodes.length / config.spider.nodesUsage) === 0)) {
this.findNode(Node.neighbor(node.id, this.table.id), {address: node.address, port: node.port})
}
}
}
setTimeout(()=>this.walk(), this.walkInterval)
}
if(!this.client || this.client.isIdle()) {
if(!this.ignore)
{
const node = this.table.shift()
if (node && (config.spider.nodesUsage === 0 || parseInt(Math.random() * this.table.nodes.length / config.spider.nodesUsage) === 0)) {
this.findNode(Node.neighbor(node.id, this.table.id), {address: node.address, port: node.port})
}
}
}
setTimeout(()=>this.walk(), this.walkInterval)
}
onFoundNodes(data, token, address) {
const nodes = Node.decodeNodes(data)
nodes.forEach((node) => {
if (node.id != this.table.id && isValidPort(node.port)) {
this.table.add(node)
}
})
this.emit('nodes', nodes)
onFoundNodes(data, token, address) {
const nodes = Node.decodeNodes(data)
nodes.forEach((node) => {
if (node.id != this.table.id && isValidPort(node.port)) {
this.table.add(node)
}
})
this.emit('nodes', nodes)
// announce torrents
if(token)
{
for(const hash of this.announceHashes)
{
this.announcePeer(hash, token, address)
}
}
}
// announce torrents
if(token)
{
for(const hash of this.announceHashes)
{
this.announcePeer(hash, token, address)
}
}
}
onFoundPeers(peers, token, address) {
if(token)
{
for(const hash of this.announceHashes)
{
this.announcePeer(hash, token, address)
}
}
onFoundPeers(peers, token, address) {
if(token)
{
for(const hash of this.announceHashes)
{
this.announcePeer(hash, token, address)
}
}
if(!peers || peers.length == 0)
return;
if(!peers || peers.length == 0)
return;
const ips = Node.decodeCompactIP(peers)
this.emit('peer', ips)
}
const ips = Node.decodeCompactIP(peers)
this.emit('peer', ips)
}
onFindNodeRequest(message, address) {
if(config.spider.packagesLimit !== 0 && this.foundSpeed > config.spider.packagesLimit)
{
return
}
onFindNodeRequest(message, address) {
if(config.spider.packagesLimit !== 0 && this.foundSpeed > config.spider.packagesLimit)
{
return
}
const {t: tid, a: {id: nid, target: infohash}} = message
const {t: tid, a: {id: nid, target: infohash}} = message
if (tid === undefined || target.length != 20 || nid.length != 20) {
return
}
if (tid === undefined || target.length != 20 || nid.length != 20) {
return
}
this.send({
t: tid,
y: 'r',
r: {
id: Node.neighbor(nid, this.table.id),
nodes: Node.encodeNodes(this.table.first())
}
}, address)
this.send({
t: tid,
y: 'r',
r: {
id: Node.neighbor(nid, this.table.id),
nodes: Node.encodeNodes(this.table.first())
}
}, address)
// also check hashes of alive ones
for(const hash of this.announceHashes)
{
this.getPeersRequest(hash, address)
}
}
// also check hashes of alive ones
for(const hash of this.announceHashes)
{
this.getPeersRequest(hash, address)
}
}
onGetPeersRequest(message, address) {
if(config.spider.packagesLimit !== 0 && this.foundSpeed > config.spider.packagesLimit)
{
return
}
onGetPeersRequest(message, address) {
if(config.spider.packagesLimit !== 0 && this.foundSpeed > config.spider.packagesLimit)
{
return
}
const {t: tid, a: {id: nid, info_hash: infohash}} = message
const {t: tid, a: {id: nid, info_hash: infohash}} = message
if (tid === undefined || infohash.length != 20 || nid.length != 20) {
return
}
if (tid === undefined || infohash.length != 20 || nid.length != 20) {
return
}
this.send({
t: tid,
y: 'r',
r: {
id: Node.neighbor(nid, this.table.id),
nodes: Node.encodeNodes(this.table.first()),
token: this.token.token
}
}, address)
this.send({
t: tid,
y: 'r',
r: {
id: Node.neighbor(nid, this.table.id),
nodes: Node.encodeNodes(this.table.first()),
token: this.token.token
}
}, address)
this.emit('unensureHash', infohash.toString('hex').toUpperCase())
this.emit('unensureHash', infohash.toString('hex').toUpperCase())
// also check hashes of alive ones
for(const hash of this.announceHashes)
{
this.getPeersRequest(hash, address)
}
}
// also check hashes of alive ones
for(const hash of this.announceHashes)
{
this.getPeersRequest(hash, address)
}
}
onAnnouncePeerRequest(message, address) {
let {t: tid, a: {info_hash: infohash, token: token, id: id, implied_port: implied, port: port}} = message
if (!tid) return
onAnnouncePeerRequest(message, address) {
let {t: tid, a: {info_hash: infohash, token: token, id: id, implied_port: implied, port: port}} = message
if (!tid) return
if (!this.token.isValid(token)) return
if (!this.token.isValid(token)) return
port = (implied != undefined && implied != 0) ? address.port : (port || 0)
if (!isValidPort(port)) return
port = (implied != undefined && implied != 0) ? address.port : (port || 0)
if (!isValidPort(port)) return
this.send({ t: tid, y: 'r', r: { id: Node.neighbor(id, this.table.id) } }, address)
this.send({ t: tid, y: 'r', r: { id: Node.neighbor(id, this.table.id) } }, address)
let addressPair = {
address: address.address,
port: port
};
this.emit('ensureHash', infohash.toString('hex').toUpperCase(), addressPair)
if(this.client && !this.ignore) {
this.client.add(addressPair, infohash);
}
}
let addressPair = {
address: address.address,
port: port
};
this.emit('ensureHash', infohash.toString('hex').toUpperCase(), addressPair)
if(this.client && !this.ignore) {
this.client.add(addressPair, infohash);
}
}
onPingRequest(message, address) {
if(config.spider.packagesLimit !== 0 && this.foundSpeed > config.spider.packagesLimit)
{
return
}
onPingRequest(message, address) {
if(config.spider.packagesLimit !== 0 && this.foundSpeed > config.spider.packagesLimit)
{
return
}
this.send({ t: message.t, y: 'r', r: { id: Node.neighbor(message.a.id, this.table.id) } }, address)
}
this.send({ t: message.t, y: 'r', r: { id: Node.neighbor(message.a.id, this.table.id) } }, address)
}
parse(data, address) {
try {
const message = bencode.decode(data)
if (message.y.toString() == 'r') {
if(message.r.nodes) {
this.foundCounter++;
this.onFoundNodes(message.r.nodes, message.r.token, address)
} else if(message.r.values) {
this.onFoundPeers(message.r.values, message.r.token, address)
}
} else if (message.y.toString() == 'q') {
this.foundCounter++;
switch(message.q.toString()) {
case 'get_peers':
this.onGetPeersRequest(message, address)
break
case 'announce_peer':
this.onAnnouncePeerRequest(message, address)
break
case 'find_node':
this.onFindNodeRequest(message, address)
break
case 'ping':
this.onPingRequest(message, address)
break
}
}
} catch (err) {}
}
parse(data, address) {
try {
const message = bencode.decode(data)
if (message.y.toString() == 'r') {
if(message.r.nodes) {
this.foundCounter++;
this.onFoundNodes(message.r.nodes, message.r.token, address)
} else if(message.r.values) {
this.onFoundPeers(message.r.values, message.r.token, address)
}
} else if (message.y.toString() == 'q') {
this.foundCounter++;
switch(message.q.toString()) {
case 'get_peers':
this.onGetPeersRequest(message, address)
break
case 'announce_peer':
this.onAnnouncePeerRequest(message, address)
break
case 'find_node':
this.onFindNodeRequest(message, address)
break
case 'ping':
this.onPingRequest(message, address)
break
}
}
} catch (err) {}
}
listen(port) {
if(this.initialized)
return
this.initialized = true
listen(port) {
if(this.initialized)
return
this.initialized = true
this.closing = false
this.udp = dgram.createSocket('udp4')
this.udp.bind(port)
this.udp.on('listening', () => {
console.log(`Listen DHT protocol on ${this.udp.address().address}:${this.udp.address().port}`)
})
this.udp.on('message', (data, addr) => {
this.parse(data, addr)
})
this.udp.on('error', (err) => {})
this.joinInterval = setInterval(() => {
if(!this.client || this.client.isIdle()) {
this.join()
}
}, 3000)
this.join()
this.walk()
}
this.closing = false
this.udp = dgram.createSocket('udp4')
this.udp.bind(port)
this.udp.on('listening', () => {
console.log(`Listen DHT protocol on ${this.udp.address().address}:${this.udp.address().port}`)
})
this.udp.on('message', (data, addr) => {
this.parse(data, addr)
})
this.udp.on('error', (err) => {})
this.joinInterval = setInterval(() => {
if(!this.client || this.client.isIdle()) {
this.join()
}
}, 3000)
this.join()
this.walk()
}
close(callback)
{
if(!this.initialized) {
if(callback)
callback()
return
}
clearInterval(this.joinInterval)
this.closing = true
this.udp.close(() => {
this.initialized = false
if(callback)
callback()
})
}
close(callback)
{
if(!this.initialized) {
if(callback)
callback()
return
}
clearInterval(this.joinInterval)
this.closing = true
this.udp.close(() => {
this.initialized = false
if(callback)
callback()
})
}
}
module.exports = Spider

View File

@ -13,119 +13,119 @@ const connectionIdLow = 0x27101980
const requests = {};
let message = function (buf, host, port) {
server.send(buf, 0, buf.length, port, host, function(err, bytes) {
if (err) {
console.log(err.message);
}
});
server.send(buf, 0, buf.length, port, host, function(err, bytes) {
if (err) {
console.log(err.message);
}
});
};
let connectTracker = function(connection) {
debug('start screape connection');
let buffer = new Buffer(16);
debug('start screape connection');
let buffer = new Buffer(16);
const transactionId = Math.floor((Math.random()*100000)+1);
const transactionId = Math.floor((Math.random()*100000)+1);
buffer.fill(0);
buffer.fill(0);
buffer.writeUInt32BE(connectionIdHigh, 0);
buffer.writeUInt32BE(connectionIdLow, 4);
buffer.writeUInt32BE(ACTION_CONNECT, 8);
buffer.writeUInt32BE(transactionId, 12);
buffer.writeUInt32BE(connectionIdHigh, 0);
buffer.writeUInt32BE(connectionIdLow, 4);
buffer.writeUInt32BE(ACTION_CONNECT, 8);
buffer.writeUInt32BE(transactionId, 12);
// очистка старых соединений
for(const transaction in requests) {
if((new Date).getTime() - requests[transaction].date.getTime() > config.udpTrackersTimeout) {
delete requests[transaction];
}
}
// очистка старых соединений
for(const transaction in requests) {
if((new Date).getTime() - requests[transaction].date.getTime() > config.udpTrackersTimeout) {
delete requests[transaction];
}
}
requests[transactionId] = connection;
message(buffer, connection.host, connection.port);
requests[transactionId] = connection;
message(buffer, connection.host, connection.port);
};
let scrapeTorrent = function (connectionIdHigh, connectionIdLow, transactionId) {
let connection = requests[transactionId];
if(!connection)
return;
let connection = requests[transactionId];
if(!connection)
return;
if(!connection.hash || connection.hash.length != 40)
return
if(!connection.hash || connection.hash.length != 40)
return
debug('start scrape');
let buffer = new Buffer(56)
debug('start scrape');
let buffer = new Buffer(56)
buffer.fill(0);
buffer.fill(0);
buffer.writeUInt32BE(connectionIdHigh, 0);
buffer.writeUInt32BE(connectionIdLow, 4);
buffer.writeUInt32BE(ACTION_SCRAPE, 8);
buffer.writeUInt32BE(transactionId, 12);
buffer.writeUInt32BE(connectionIdHigh, 0);
buffer.writeUInt32BE(connectionIdLow, 4);
buffer.writeUInt32BE(ACTION_SCRAPE, 8);
buffer.writeUInt32BE(transactionId, 12);
try
{
buffer.write(connection.hash, 16, buffer.length, 'hex');
// do scrape
message(buffer, connection.host, connection.port);
} catch(error)
{
console.log('ERROR on scrape', error)
}
try
{
buffer.write(connection.hash, 16, buffer.length, 'hex');
// do scrape
message(buffer, connection.host, connection.port);
} catch(error)
{
console.log('ERROR on scrape', error)
}
};
server.on("message", function (msg, rinfo) {
let buffer = new Buffer(msg)
let buffer = new Buffer(msg)
const action = buffer.readUInt32BE(0, 4);
const transactionId = buffer.readUInt32BE(4, 4);
const action = buffer.readUInt32BE(0, 4);
const transactionId = buffer.readUInt32BE(4, 4);
if(!(transactionId in requests))
return;
if(!(transactionId in requests))
return;
debug("returned action: " + action);
debug("returned transactionId: " + transactionId);
debug("returned action: " + action);
debug("returned transactionId: " + transactionId);
if (action === ACTION_CONNECT) {
debug("connect response");
if (action === ACTION_CONNECT) {
debug("connect response");
let connectionIdHigh = buffer.readUInt32BE(8, 4);
let connectionIdLow = buffer.readUInt32BE(12, 4);
let connectionIdHigh = buffer.readUInt32BE(8, 4);
let connectionIdLow = buffer.readUInt32BE(12, 4);
scrapeTorrent(connectionIdHigh, connectionIdLow, transactionId);
scrapeTorrent(connectionIdHigh, connectionIdLow, transactionId);
} else if (action === ACTION_SCRAPE) {
debug("scrape response");
} else if (action === ACTION_SCRAPE) {
debug("scrape response");
let seeders = buffer.readUInt32BE(8, 4);
let completed = buffer.readUInt32BE(12, 4);
let leechers = buffer.readUInt32BE(16, 4);
let seeders = buffer.readUInt32BE(8, 4);
let completed = buffer.readUInt32BE(12, 4);
let leechers = buffer.readUInt32BE(16, 4);
let connection = requests[transactionId];
connection.callback({
host: connection.host,
port: connection.port,
hash: connection.hash,
seeders,
completed,
leechers
})
delete requests[transactionId];
} else if (action === ACTION_ERROR) {
delete requests[transactionId];
console.log("error in scrape response");
}
let connection = requests[transactionId];
connection.callback({
host: connection.host,
port: connection.port,
hash: connection.hash,
seeders,
completed,
leechers
})
delete requests[transactionId];
} else if (action === ACTION_ERROR) {
delete requests[transactionId];
console.log("error in scrape response");
}
});
let getPeersStatistic = (host, port, hash, callback) => {
let connection = {
host, port, hash, callback, date: new Date()
}
connectTracker(connection);
let connection = {
host, port, hash, callback, date: new Date()
}
connectTracker(connection);
}
server.on("listening", function () {
var address = server.address();
console.log("listening udp tracker respose on " + address.address + ":" + address.port);
var address = server.address();
console.log("listening udp tracker respose on " + address.address + ":" + address.port);
});
server.bind(config.udpTrackersPort);

View File

@ -18,230 +18,230 @@ var EXT_HANDSHAKE_ID = 0;
var BT_MSG_ID = 20;
var Wire = function(infohash) {
stream.Duplex.call(this);
stream.Duplex.call(this);
this._bitfield = new BitField(0, { grow: BITFIELD_GROW });
this._infohash = infohash;
this._bitfield = new BitField(0, { grow: BITFIELD_GROW });
this._infohash = infohash;
this._buffer = [];
this._bufferSize = 0;
this._buffer = [];
this._bufferSize = 0;
this._next = null;
this._nextSize = 0;
this._next = null;
this._nextSize = 0;
this._metadata = null;
this._metadataSize = null;
this._numPieces = 0;
this._ut_metadata = null;
this._metadata = null;
this._metadataSize = null;
this._numPieces = 0;
this._ut_metadata = null;
this._onHandshake();
this._onHandshake();
}
util.inherits(Wire, stream.Duplex);
Wire.prototype._onMessageLength = function (buffer) {
if (buffer.length >= 4) {
var length = buffer.readUInt32BE(0);
if (length > 0) {
this._register(length, this._onMessage)
}
}
if (buffer.length >= 4) {
var length = buffer.readUInt32BE(0);
if (length > 0) {
this._register(length, this._onMessage)
}
}
};
Wire.prototype._onMessage = function (buffer) {
this._register(4, this._onMessageLength)
if (buffer[0] == BT_MSG_ID) {
this._onExtended(buffer.readUInt8(1), buffer.slice(2));
}
this._register(4, this._onMessageLength)
if (buffer[0] == BT_MSG_ID) {
this._onExtended(buffer.readUInt8(1), buffer.slice(2));
}
};
Wire.prototype._onExtended = function(ext, buf) {
if (ext === 0) {
try {
this._onExtHandshake(bencode.decode(buf));
}
catch (err) {
this._fail();
}
}
else {
this._onPiece(buf);
}
if (ext === 0) {
try {
this._onExtHandshake(bencode.decode(buf));
}
catch (err) {
this._fail();
}
}
else {
this._onPiece(buf);
}
};
Wire.prototype._register = function (size, next) {
this._nextSize = size;
this._next = next;
this._nextSize = size;
this._next = next;
};
Wire.prototype.end = function() {
stream.Duplex.prototype.end.apply(this, arguments);
stream.Duplex.prototype.end.apply(this, arguments);
};
Wire.prototype._onHandshake = function() {
this._register(1, function(buffer) {
if (buffer.length == 0) {
this.end();
return this._fail();
}
var pstrlen = buffer.readUInt8(0);
this._register(pstrlen + 48, function(handshake) {
var protocol = handshake.slice(0, pstrlen);
if (protocol.toString() !== BT_PROTOCOL.toString()) {
this.end();
this._fail();
return;
}
handshake = handshake.slice(pstrlen);
if ( !!(handshake[5] & 0x10) ) {
this._register(4, this._onMessageLength);
this._sendExtHandshake();
}
else {
this._fail();
}
}.bind(this));
}.bind(this));
this._register(1, function(buffer) {
if (buffer.length == 0) {
this.end();
return this._fail();
}
var pstrlen = buffer.readUInt8(0);
this._register(pstrlen + 48, function(handshake) {
var protocol = handshake.slice(0, pstrlen);
if (protocol.toString() !== BT_PROTOCOL.toString()) {
this.end();
this._fail();
return;
}
handshake = handshake.slice(pstrlen);
if ( !!(handshake[5] & 0x10) ) {
this._register(4, this._onMessageLength);
this._sendExtHandshake();
}
else {
this._fail();
}
}.bind(this));
}.bind(this));
};
Wire.prototype._onExtHandshake = function(extHandshake) {
if (!extHandshake.metadata_size || !extHandshake.m.ut_metadata
if (!extHandshake.metadata_size || !extHandshake.m.ut_metadata
|| extHandshake.metadata_size > MAX_METADATA_SIZE) {
this._fail();
return;
}
this._fail();
return;
}
this._metadataSize = extHandshake.metadata_size;
this._numPieces = Math.ceil(this._metadataSize / PIECE_LENGTH);
this._ut_metadata = extHandshake.m.ut_metadata;
this._metadataSize = extHandshake.metadata_size;
this._numPieces = Math.ceil(this._metadataSize / PIECE_LENGTH);
this._ut_metadata = extHandshake.m.ut_metadata;
this._requestPieces();
this._requestPieces();
}
Wire.prototype._requestPieces = function() {
this._metadata = new Buffer(this._metadataSize);
for (var piece = 0; piece < this._numPieces; piece++) {
this._requestPiece(piece);
}
this._metadata = new Buffer(this._metadataSize);
for (var piece = 0; piece < this._numPieces; piece++) {
this._requestPiece(piece);
}
};
Wire.prototype._requestPiece = function(piece) {
var msg = Buffer.concat([
new Buffer([BT_MSG_ID]),
new Buffer([this._ut_metadata]),
bencode.encode({msg_type: 0, piece: piece})
]);
this._sendMessage(msg);
var msg = Buffer.concat([
new Buffer([BT_MSG_ID]),
new Buffer([this._ut_metadata]),
bencode.encode({msg_type: 0, piece: piece})
]);
this._sendMessage(msg);
};
Wire.prototype._sendPacket = function(packet) {
this.push(packet);
this.push(packet);
};
Wire.prototype._sendMessage = function(msg) {
var buf = new Buffer(4);
buf.writeUInt32BE(msg.length, 0);
this._sendPacket(Buffer.concat([buf, msg]));
var buf = new Buffer(4);
buf.writeUInt32BE(msg.length, 0);
this._sendPacket(Buffer.concat([buf, msg]));
};
Wire.prototype.sendHandshake = function() {
var peerID = Node.generateID();
var packet = Buffer.concat([
new Buffer([BT_PROTOCOL.length]),
BT_PROTOCOL, BT_RESERVED, this._infohash, peerID
]);
this._sendPacket(packet);
var peerID = Node.generateID();
var packet = Buffer.concat([
new Buffer([BT_PROTOCOL.length]),
BT_PROTOCOL, BT_RESERVED, this._infohash, peerID
]);
this._sendPacket(packet);
};
Wire.prototype._sendExtHandshake = function() {
var msg = Buffer.concat([
new Buffer([BT_MSG_ID]),
new Buffer([EXT_HANDSHAKE_ID]),
bencode.encode({m: {ut_metadata: 1}})
]);
this._sendMessage(msg);
var msg = Buffer.concat([
new Buffer([BT_MSG_ID]),
new Buffer([EXT_HANDSHAKE_ID]),
bencode.encode({m: {ut_metadata: 1}})
]);
this._sendMessage(msg);
};
Wire.prototype._onPiece = function(piece) {
var dict, trailer;
try {
var str = piece.toString();
var trailerIndex = str.indexOf('ee') + 2;
dict = bencode.decode(str.substring(0, trailerIndex));
trailer = piece.slice(trailerIndex);
}
catch (err) {
this._fail();
return;
}
if (dict.msg_type != 1) {
this._fail();
return;
}
if (trailer.length > PIECE_LENGTH) {
this._fail();
return;
}
trailer.copy(this._metadata, dict.piece * PIECE_LENGTH);
this._bitfield.set(dict.piece);
this._checkDone();
var dict, trailer;
try {
var str = piece.toString();
var trailerIndex = str.indexOf('ee') + 2;
dict = bencode.decode(str.substring(0, trailerIndex));
trailer = piece.slice(trailerIndex);
}
catch (err) {
this._fail();
return;
}
if (dict.msg_type != 1) {
this._fail();
return;
}
if (trailer.length > PIECE_LENGTH) {
this._fail();
return;
}
trailer.copy(this._metadata, dict.piece * PIECE_LENGTH);
this._bitfield.set(dict.piece);
this._checkDone();
};
Wire.prototype._checkDone = function () {
var done = true;
for (var piece = 0; piece < this._numPieces; piece++) {
if (!this._bitfield.get(piece)) {
done = false;
break;
}
}
if (!done) {
return
}
this._onDone(this._metadata);
var done = true;
for (var piece = 0; piece < this._numPieces; piece++) {
if (!this._bitfield.get(piece)) {
done = false;
break;
}
}
if (!done) {
return
}
this._onDone(this._metadata);
};
Wire.prototype._onDone = function(metadata) {
try {
var info = bencode.decode(metadata).info;
if (info) {
metadata = bencode.encode(info);
}
}
catch (err) {
this._fail();
return;
}
var infohash = crypto.createHash('sha1').update(metadata).digest('hex');
if (this._infohash.toString('hex') != infohash ) {
this._fail();
return false;
}
this.emit('metadata', {info: bencode.decode(metadata, 'utf8')}, this._infohash);
try {
var info = bencode.decode(metadata).info;
if (info) {
metadata = bencode.encode(info);
}
}
catch (err) {
this._fail();
return;
}
var infohash = crypto.createHash('sha1').update(metadata).digest('hex');
if (this._infohash.toString('hex') != infohash ) {
this._fail();
return false;
}
this.emit('metadata', {info: bencode.decode(metadata, 'utf8')}, this._infohash);
};
Wire.prototype._fail = function() {
this.emit('fail');
this.emit('fail');
};
Wire.prototype._write = function (buf, encoding, next) {
this._bufferSize += buf.length;
this._buffer.push(buf);
this._bufferSize += buf.length;
this._buffer.push(buf);
while (this._bufferSize >= this._nextSize) {
var buffer = Buffer.concat(this._buffer);
this._bufferSize -= this._nextSize;
this._buffer = this._bufferSize
? [buffer.slice(this._nextSize)]
: [];
this._next(buffer.slice(0, this._nextSize));
}
while (this._bufferSize >= this._nextSize) {
var buffer = Buffer.concat(this._buffer);
this._bufferSize -= this._nextSize;
this._buffer = this._bufferSize
? [buffer.slice(this._nextSize)]
: [];
this._next(buffer.slice(0, this._nextSize));
}
next(null);
next(null);
}
Wire.prototype._read = function() {
// do nothing
// do nothing
};
module.exports = Wire;

View File

@ -1,10 +1,10 @@
// https://stackoverflow.com/questions/15270902/check-for-internet-connectivity-in-nodejs
module.exports = function checkInternet(cb) {
require('dns').lookup('google.com',function(err) {
if (err && err.code == "ENOTFOUND") {
cb(false);
} else {
cb(true);
}
})
require('dns').lookup('google.com',function(err) {
if (err && err.code == "ENOTFOUND") {
cb(false);
} else {
cb(true);
}
})
}

View File

@ -11,7 +11,7 @@ let config = {
udpTrackersTimeout: 3 * 60 * 1000,
peerId: undefined,
language: 'en',
p2p: true,
p2pConnections: 10,
p2pBootstrap: true,
@ -25,9 +25,9 @@ let config = {
sitemapMaxSize: 25000,
sphinx: {
host : '127.0.0.1',
port : 9306,
connectionLimit: 30
host : '127.0.0.1',
port : 9306,
connectionLimit: 30
},
spider: {
@ -79,8 +79,8 @@ const configProxy = new Proxy(config, {
target[prop] = value
if(!fs.existsSync(configPath))
if(!fs.existsSync(configPath))
fs.writeFileSync(configPath, '{}')
const data = fs.readFileSync(configPath)

View File

@ -13,31 +13,31 @@ const currentVersion = 4
module.exports = async (callback, mainWindow, sphinxApp) => {
const sphinx = await single().waitConnection()
const sphinx = await single().waitConnection()
const setVersion = async (version) => {
await sphinx.query(`delete from version where id = 1`)
await sphinx.query(`insert into version(id, version) values(1, ${version})`)
if(sphinxApp)
fs.writeFileSync(`${sphinxApp.directoryPath}/version.vrs`, version)
}
const setVersion = async (version) => {
await sphinx.query(`delete from version where id = 1`)
await sphinx.query(`insert into version(id, version) values(1, ${version})`)
if(sphinxApp)
fs.writeFileSync(`${sphinxApp.directoryPath}/version.vrs`, version)
}
let patchWindow;
const openPatchWindow = () => {
if(patchWindow)
return
let patchWindow;
const openPatchWindow = () => {
if(patchWindow)
return
if(!BrowserWindow)
return
if(!BrowserWindow)
return
if(mainWindow)
mainWindow.hide()
if(mainWindow)
mainWindow.hide()
patchWindow = new BrowserWindow({width: 800, height: 400, closable: false})
patchWindow = new BrowserWindow({width: 800, height: 400, closable: false})
patchWindow.setMenu(null)
patchWindow.setMenu(null)
patchWindow.loadURL("data:text/html;charset=utf-8," + encodeURI(`
patchWindow.loadURL("data:text/html;charset=utf-8," + encodeURI(`
<html>
<head><title>Database patching...</title></head>
<style>
@ -97,146 +97,146 @@ module.exports = async (callback, mainWindow, sphinxApp) => {
</body>
</html>
`))
}
}
const patch = async (version) => {
console.log('db version', version)
switch(version)
{
case 1:
{
console.log('patch db to version 2')
openPatchWindow()
let i = 1
const patch = async (version) => {
console.log('db version', version)
switch(version)
{
case 1:
{
console.log('patch db to version 2')
openPatchWindow()
let i = 1
const torrents = (await sphinx.query("SELECT COUNT(*) AS c FROM torrents"))[0].c
const files = (await sphinx.query("SELECT COUNT(*) AS c FROM files"))[0].c
const torrents = (await sphinx.query("SELECT COUNT(*) AS c FROM torrents"))[0].c
const files = (await sphinx.query("SELECT COUNT(*) AS c FROM files"))[0].c
await forBigTable(sphinx, 'torrents', async (torrent) => {
console.log('update index', torrent.id, torrent.name, '[', i, 'of', torrents, ']')
if(patchWindow)
patchWindow.webContents.send('reindex', {field: torrent.name, index: i++, all: torrents, torrent: true})
await forBigTable(sphinx, 'torrents', async (torrent) => {
console.log('update index', torrent.id, torrent.name, '[', i, 'of', torrents, ']')
if(patchWindow)
patchWindow.webContents.send('reindex', {field: torrent.name, index: i++, all: torrents, torrent: true})
torrent.nameIndex = torrent.name
await sphinx.query(`DELETE FROM torrents WHERE id = ${torrent.id}`)
await sphinx.insertValues('torrents', torrent)
})
i = 1
await forBigTable(sphinx, 'files', async (file) => {
console.log('update index', file.id, file.path, '[', i, 'of', files, ']')
if(patchWindow)
patchWindow.webContents.send('reindex', {field: file.path, index: i++, all: files})
torrent.nameIndex = torrent.name
await sphinx.query(`DELETE FROM torrents WHERE id = ${torrent.id}`)
await sphinx.insertValues('torrents', torrent)
})
i = 1
await forBigTable(sphinx, 'files', async (file) => {
console.log('update index', file.id, file.path, '[', i, 'of', files, ']')
if(patchWindow)
patchWindow.webContents.send('reindex', {field: file.path, index: i++, all: files})
file.pathIndex = file.path
await sphinx.query(`DELETE FROM files WHERE id = ${file.id}`)
await sphinx.insertValues('files', file)
})
file.pathIndex = file.path
await sphinx.query(`DELETE FROM files WHERE id = ${file.id}`)
await sphinx.insertValues('files', file)
})
await setVersion(2)
}
case 2:
{
openPatchWindow()
await setVersion(2)
}
case 2:
{
openPatchWindow()
console.log('optimizing torrents')
if(patchWindow)
patchWindow.webContents.send('optimize', {field: 'torrents'})
sphinx.query(`OPTIMIZE INDEX torrents`)
await sphinxApp.waitOptimized('torrents')
console.log('optimizing torrents')
if(patchWindow)
patchWindow.webContents.send('optimize', {field: 'torrents'})
sphinx.query(`OPTIMIZE INDEX torrents`)
await sphinxApp.waitOptimized('torrents')
console.log('optimizing files')
if(patchWindow)
patchWindow.webContents.send('optimize', {field: 'files'})
sphinx.query(`OPTIMIZE INDEX files`)
await sphinxApp.waitOptimized('files')
console.log('optimizing files')
if(patchWindow)
patchWindow.webContents.send('optimize', {field: 'files'})
sphinx.query(`OPTIMIZE INDEX files`)
await sphinxApp.waitOptimized('files')
await setVersion(3)
}
case 3:
{
openPatchWindow()
await setVersion(3)
}
case 3:
{
openPatchWindow()
// block xxx
let bad = 0
// block xxx
let bad = 0
let i = 1
const torrents = (await sphinx.query("SELECT COUNT(*) AS c FROM torrents"))[0].c
await forBigTable(sphinx, 'torrents', async (torrent) => {
console.log('update index', torrent.id, torrent.name, '[', i, 'of', torrents, '] - delete:', bad)
if(patchWindow)
patchWindow.webContents.send('reindex', {field: torrent.name, index: i++, all: torrents, torrent: true})
let i = 1
const torrents = (await sphinx.query("SELECT COUNT(*) AS c FROM torrents"))[0].c
await forBigTable(sphinx, 'torrents', async (torrent) => {
console.log('update index', torrent.id, torrent.name, '[', i, 'of', torrents, '] - delete:', bad)
if(patchWindow)
patchWindow.webContents.send('reindex', {field: torrent.name, index: i++, all: torrents, torrent: true})
if(torrent.contentcategory == 'xxx')
{
delete torrent.contentcategory
delete torrent.contenttype
torrent = await getTorrent(sphinx, null, torrent) // get files
torrentTypeDetect(torrent, torrent.filesList)
if(torrent.contentType == 'bad')
{
console.log('remove bad torrent', torrent.name)
bad++
await sphinx.query(`DELETE FROM torrents WHERE hash = '${torrent.hash}'`)
await sphinx.query(`DELETE FROM files WHERE hash = '${torrent.hash}'`)
}
}
})
if(torrent.contentcategory == 'xxx')
{
delete torrent.contentcategory
delete torrent.contenttype
torrent = await getTorrent(sphinx, null, torrent) // get files
torrentTypeDetect(torrent, torrent.filesList)
if(torrent.contentType == 'bad')
{
console.log('remove bad torrent', torrent.name)
bad++
await sphinx.query(`DELETE FROM torrents WHERE hash = '${torrent.hash}'`)
await sphinx.query(`DELETE FROM files WHERE hash = '${torrent.hash}'`)
}
}
})
console.log('removed', bad, 'torrents')
console.log('removed', bad, 'torrents')
await setVersion(4)
}
}
console.log('db patch done')
sphinx.destroy()
if(patchWindow)
{
patchWindow.destroy()
if(mainWindow)
mainWindow.show()
}
callback()
}
await setVersion(4)
}
}
console.log('db patch done')
sphinx.destroy()
if(patchWindow)
{
patchWindow.destroy()
if(mainWindow)
mainWindow.show()
}
callback()
}
// init of db, we can set version to last
if(sphinxApp && sphinxApp.isInitDb)
{
console.log('new db, set version to last version', currentVersion)
await setVersion(currentVersion)
}
// init of db, we can set version to last
if(sphinxApp && sphinxApp.isInitDb)
{
console.log('new db, set version to last version', currentVersion)
await setVersion(currentVersion)
}
sphinx.query('select * from version', async (err, version) => {
if(err)
{
console.log('error on version get on db patch')
return
}
sphinx.query('select * from version', async (err, version) => {
if(err)
{
console.log('error on version get on db patch')
return
}
if(!version || !version[0] || !version[0].version)
{
if(sphinxApp && fs.existsSync(`${sphinxApp.directoryPath}/version.vrs`))
{
const ver = parseInt(fs.readFileSync(`${sphinxApp.directoryPath}/version.vrs`))
if(ver > 0)
{
console.log('readed version from version.vrs', ver)
patch(ver)
}
else
{
console.log('error: bad version in version.vrs')
}
}
else
{
console.log('version not founded, set db version to 1')
await setVersion(1)
patch(1)
}
}
else
{
patch(version[0].version)
}
})
if(!version || !version[0] || !version[0].version)
{
if(sphinxApp && fs.existsSync(`${sphinxApp.directoryPath}/version.vrs`))
{
const ver = parseInt(fs.readFileSync(`${sphinxApp.directoryPath}/version.vrs`))
if(ver > 0)
{
console.log('readed version from version.vrs', ver)
patch(ver)
}
else
{
console.log('error: bad version in version.vrs')
}
}
else
{
console.log('version not founded, set db version to 1')
await setVersion(1)
patch(1)
}
}
else
{
patch(version[0].version)
}
})
}

View File

@ -2,67 +2,67 @@ const path = require('path')
const fs = require('fs')
module.exports = (app) => {
if (fs.existsSync(`./${app}`)) {
return `./${app}`
}
if (fs.existsSync(`./${app}`)) {
return `./${app}`
}
if (/^win/.test(process.platform) && fs.existsSync(`./${app}.exe`)) {
return `./${app}.exe`
}
if (/^win/.test(process.platform) && fs.existsSync(`./${app}.exe`)) {
return `./${app}.exe`
}
if (/^win/.test(process.platform) && fs.existsSync(`./${process.arch}/${app}.exe`)) {
return `./${process.arch}/${app}.exe`
}
if (/^win/.test(process.platform) && fs.existsSync(`./${process.arch}/${app}.exe`)) {
return `./${process.arch}/${app}.exe`
}
if(/^win/.test(process.platform) && fs.existsSync(path.dirname(process.execPath) + `/${app}.exe`)) {
return path.dirname(process.execPath) + `/${app}.exe`
}
if(/^win/.test(process.platform) && fs.existsSync(path.dirname(process.execPath) + `/${app}.exe`)) {
return path.dirname(process.execPath) + `/${app}.exe`
}
if(/^win/.test(process.platform) && fs.existsSync(path.dirname(process.execPath) + `/${process.arch}/${app}.exe`)) {
return path.dirname(process.execPath) + `/${process.arch}/${app}.exe`
}
if(/^win/.test(process.platform) && fs.existsSync(path.dirname(process.execPath) + `/${process.arch}/${app}.exe`)) {
return path.dirname(process.execPath) + `/${process.arch}/${app}.exe`
}
if (fs.existsSync(fs.realpathSync(__dirname) + `/${app}`)) {
return fs.realpathSync(__dirname) + `/${app}`
}
if (fs.existsSync(fs.realpathSync(__dirname) + `/${app}`)) {
return fs.realpathSync(__dirname) + `/${app}`
}
if (fs.existsSync(fs.realpathSync(__dirname) + `/${process.arch}/${app}`)) {
return fs.realpathSync(__dirname) + `/${process.arch}/${app}`
}
if (fs.existsSync(fs.realpathSync(__dirname) + `/${process.arch}/${app}`)) {
return fs.realpathSync(__dirname) + `/${process.arch}/${app}`
}
if (fs.existsSync(fs.realpathSync(path.join(__dirname, '/../../..')) + `/${app}`)) {
return fs.realpathSync(path.join(__dirname, '/../../..')) + `/${app}`
}
if (fs.existsSync(fs.realpathSync(path.join(__dirname, '/../../..')) + `/${app}`)) {
return fs.realpathSync(path.join(__dirname, '/../../..')) + `/${app}`
}
if (fs.existsSync(fs.realpathSync(path.join(__dirname, '/../../..')) + `/${process.arch}/${app}`)) {
return fs.realpathSync(path.join(__dirname, '/../../..')) + `/${process.arch}/${app}`
}
if (fs.existsSync(fs.realpathSync(path.join(__dirname, '/../../..')) + `/${process.arch}/${app}`)) {
return fs.realpathSync(path.join(__dirname, '/../../..')) + `/${process.arch}/${app}`
}
try {
if (process.platform === 'darwin' && fs.existsSync(fs.realpathSync(path.join(__dirname, '/../../../MacOS')) + `/${app}`)) {
return fs.realpathSync(path.join(__dirname, '/../../../MacOS')) + `/${app}`
}
} catch (e) {}
try {
if (process.platform === 'darwin' && fs.existsSync(fs.realpathSync(path.join(__dirname, '/../../../MacOS')) + `/${app}`)) {
return fs.realpathSync(path.join(__dirname, '/../../../MacOS')) + `/${app}`
}
} catch (e) {}
if (/^win/.test(process.platform) && fs.existsSync(`imports/win/${app}.exe`)) {
return `imports/win/${app}.exe`
}
if (/^win/.test(process.platform) && fs.existsSync(`imports/win/${app}.exe`)) {
return `imports/win/${app}.exe`
}
if (/^win/.test(process.platform) && fs.existsSync(`imports/win/${process.arch}/${app}.exe`)) {
return `imports/win/${process.arch}/${app}.exe`
}
if (/^win/.test(process.platform) && fs.existsSync(`imports/win/${process.arch}/${app}.exe`)) {
return `imports/win/${process.arch}/${app}.exe`
}
if (process.platform === 'linux' && fs.existsSync(`imports/linux/${app}`)) {
return `imports/linux/${app}`
}
if (process.platform === 'linux' && fs.existsSync(`imports/linux/${app}`)) {
return `imports/linux/${app}`
}
if (process.platform === 'linux' && fs.existsSync(`imports/linux/${process.arch}/${app}`)) {
return `imports/linux/${process.arch}/${app}`
}
if (process.platform === 'linux' && fs.existsSync(`imports/linux/${process.arch}/${app}`)) {
return `imports/linux/${process.arch}/${app}`
}
if (process.platform === 'darwin' && fs.existsSync(`imports/darwin/${app}`)) {
return `imports/darwin/${app}`
}
if (process.platform === 'darwin' && fs.existsSync(`imports/darwin/${app}`)) {
return `imports/darwin/${app}`
}
return `${app}`
return `${app}`
}

View File

@ -1,22 +1,22 @@
module.exports = class Feed {
constructor({sphinx})
{
this.feed = []
this.sphinx = sphinx
this.loaded = false
this.max = 1000
}
constructor({sphinx})
{
this.feed = []
this.sphinx = sphinx
this.loaded = false
this.max = 1000
}
size()
{
return this.feed.length
}
size()
{
return this.feed.length
}
async save() {
if(!this.loaded)
return // feed not loaded on begining, ignore saving
async save() {
if(!this.loaded)
return // feed not loaded on begining, ignore saving
console.log('saving feed')
console.log('saving feed')
await this.sphinx.query('delete from feed where id > 0')
let id = 0
return Promise.all(
@ -24,77 +24,77 @@ module.exports = class Feed {
async record => await this.sphinx.query('insert into feed(id, data) values(?, ?)', [++id, JSON.stringify(record)])
)
)
}
}
async load() {
this.feed = await this.sphinx.query('select * from feed limit 1000')
if(this.feed && this.feed.length > 0)
this.feed = this.feed.map(f => JSON.parse(f.data))
else
this.feed = []
async load() {
this.feed = await this.sphinx.query('select * from feed limit 1000')
if(this.feed && this.feed.length > 0)
this.feed = this.feed.map(f => JSON.parse(f.data))
else
this.feed = []
this._order()
this.loaded = true
console.log('lodead feed')
}
this._order()
this.loaded = true
console.log('lodead feed')
}
clear()
{
console.log('clearing feed')
this.feed = []
}
clear()
{
console.log('clearing feed')
this.feed = []
}
add(data) {
let index = -1
if(data.hash)
index = this.feed.findIndex(element => element.hash === data.hash)
add(data) {
let index = -1
if(data.hash)
index = this.feed.findIndex(element => element.hash === data.hash)
if(index >= 0)
this.feed[index] = Object.assign(this.feed[index], data) // just push up element
else
{
if(typeof data == 'object')
{
data.feedDate = Math.floor(Date.now() / 1000)
}
if(index >= 0)
this.feed[index] = Object.assign(this.feed[index], data) // just push up element
else
{
if(typeof data == 'object')
{
data.feedDate = Math.floor(Date.now() / 1000)
}
if(this.feed.length >= this.max)
{
//cleanup
for(let i = this.feed.length - 1; i <= 0; i--)
if(this._compare(this.feed[i]) <= 0)
this.feed.pop()
else
break
if(this.feed.length >= this.max)
{
//cleanup
for(let i = this.feed.length - 1; i <= 0; i--)
if(this._compare(this.feed[i]) <= 0)
this.feed.pop()
else
break
if(this.feed.length >= this.max)
this.feed[this.feed.length - 1] = data // replace last one
else
this.feed.push(data) // insert
}
else
{
this.feed.push(data) // insert
}
}
if(this.feed.length >= this.max)
this.feed[this.feed.length - 1] = data // replace last one
else
this.feed.push(data) // insert
}
else
{
this.feed.push(data) // insert
}
}
this._order()
}
this._order()
}
_order() {
this.feed.sort((a, b) => this._compare(b) - this._compare(a))
}
_order() {
this.feed.sort((a, b) => this._compare(b) - this._compare(a))
}
_compare(x)
{
const rating = (x && x.good) || 0
const comments = 0
const time = Math.floor(Date.now() / 1000) - x.feedDate
_compare(x)
{
const rating = (x && x.good) || 0
const comments = 0
const time = Math.floor(Date.now() / 1000) - x.feedDate
const maxTime = 600000
if(time > maxTime)
time = maxTime
const relativeTime = (maxTime - time) / maxTime
return relativeTime * relativeTime + rating * 1.5 * relativeTime + comments * 4 * relativeTime
}
const maxTime = 600000
if(time > maxTime)
time = maxTime
const relativeTime = (maxTime - time) / maxTime
return relativeTime * relativeTime + rating * 1.5 * relativeTime + comments * 4 * relativeTime
}
}

View File

@ -1,24 +1,24 @@
module.exports = (sphinx, table, callback, doneCallback, max = 1000, where = '') => new Promise((done) => {
const checker = (index = 0) => {
sphinx.query(`SELECT * FROM ${table} WHERE id > ${index} ${where} LIMIT ${max}`, (err, torrents) => {
const finish = () => {
if(err)
console.log('big table parse error', err)
if(doneCallback)
doneCallback(true)
done(true)
}
const checker = (index = 0) => {
sphinx.query(`SELECT * FROM ${table} WHERE id > ${index} ${where} LIMIT ${max}`, (err, torrents) => {
const finish = () => {
if(err)
console.log('big table parse error', err)
if(doneCallback)
doneCallback(true)
done(true)
}
if(!err && torrents.length > 0)
Promise.all(torrents.map(callback)).then(() => {
if(torrents.length === max)
checker(torrents[torrents.length - 1].id)
else
finish()
})
else
finish()
});
}
checker()
if(!err && torrents.length > 0)
Promise.all(torrents.map(callback)).then(() => {
if(torrents.length === max)
checker(torrents[torrents.length - 1].id)
else
finish()
})
else
finish()
});
}
checker()
})

View File

@ -7,78 +7,78 @@ import { app, BrowserWindow, screen } from "electron";
import jetpack from "fs-jetpack";
export default (name, options) => {
const userDataDir = jetpack.cwd(app.getPath("userData"));
const stateStoreFile = `window-state-${name}.json`;
const defaultSize = {
width: options.width,
height: options.height
};
let state = {};
let win;
const userDataDir = jetpack.cwd(app.getPath("userData"));
const stateStoreFile = `window-state-${name}.json`;
const defaultSize = {
width: options.width,
height: options.height
};
let state = {};
let win;
const restore = () => {
let restoredState = {};
try {
restoredState = userDataDir.read(stateStoreFile, "json");
} catch (err) {
// For some reason json can't be read (might be corrupted).
// No worries, we have defaults.
}
return Object.assign({}, defaultSize, restoredState);
};
const restore = () => {
let restoredState = {};
try {
restoredState = userDataDir.read(stateStoreFile, "json");
} catch (err) {
// For some reason json can't be read (might be corrupted).
// No worries, we have defaults.
}
return Object.assign({}, defaultSize, restoredState);
};
const getCurrentPosition = () => {
const position = win.getPosition();
const size = win.getSize();
return {
x: position[0],
y: position[1],
width: size[0],
height: size[1]
};
};
const getCurrentPosition = () => {
const position = win.getPosition();
const size = win.getSize();
return {
x: position[0],
y: position[1],
width: size[0],
height: size[1]
};
};
const windowWithinBounds = (windowState, bounds) => {
return (
windowState.x >= bounds.x &&
const windowWithinBounds = (windowState, bounds) => {
return (
windowState.x >= bounds.x &&
windowState.y >= bounds.y &&
windowState.x + windowState.width <= bounds.x + bounds.width &&
windowState.y + windowState.height <= bounds.y + bounds.height
);
};
);
};
const resetToDefaults = () => {
const bounds = screen.getPrimaryDisplay().bounds;
return Object.assign({}, defaultSize, {
x: (bounds.width - defaultSize.width) / 2,
y: (bounds.height - defaultSize.height) / 2
});
};
const resetToDefaults = () => {
const bounds = screen.getPrimaryDisplay().bounds;
return Object.assign({}, defaultSize, {
x: (bounds.width - defaultSize.width) / 2,
y: (bounds.height - defaultSize.height) / 2
});
};
const ensureVisibleOnSomeDisplay = windowState => {
const visible = screen.getAllDisplays().some(display => {
return windowWithinBounds(windowState, display.bounds);
});
if (!visible) {
// Window is partially or fully not visible now.
// Reset it to safe defaults.
return resetToDefaults();
}
return windowState;
};
const ensureVisibleOnSomeDisplay = windowState => {
const visible = screen.getAllDisplays().some(display => {
return windowWithinBounds(windowState, display.bounds);
});
if (!visible) {
// Window is partially or fully not visible now.
// Reset it to safe defaults.
return resetToDefaults();
}
return windowState;
};
const saveState = () => {
if (!win.isMinimized() && !win.isMaximized()) {
Object.assign(state, getCurrentPosition());
}
userDataDir.write(stateStoreFile, state, { atomic: true });
};
const saveState = () => {
if (!win.isMinimized() && !win.isMaximized()) {
Object.assign(state, getCurrentPosition());
}
userDataDir.write(stateStoreFile, state, { atomic: true });
};
state = ensureVisibleOnSomeDisplay(restore());
state = ensureVisibleOnSomeDisplay(restore());
win = new BrowserWindow(Object.assign({}, options, state));
win = new BrowserWindow(Object.assign({}, options, state));
win.on("close", saveState);
win.on("close", saveState);
return win;
return win;
};

View File

@ -4,94 +4,94 @@ import url from "url";
import __ from '../../app/translation'
export const aboutMenuTemplateFunc = () => ({
label: __("About"),
submenu: [
{
label: __("Changelog"),
accelerator: "CmdOrCtrl+]",
click: () => {
const win = new BrowserWindow({
parent: BrowserWindow.getFocusedWindow(),
modal: true
})
win.setMenu(null)
win.loadURL(url.format({
pathname: path.join(__dirname, "app.html"),
protocol: "file:",
slashes: true
}))
win.webContents.on('did-finish-load', () => {
setTimeout(() => win.send('url', '/changelog'), 0)
});
label: __("About"),
submenu: [
{
label: __("Changelog"),
accelerator: "CmdOrCtrl+]",
click: () => {
const win = new BrowserWindow({
parent: BrowserWindow.getFocusedWindow(),
modal: true
})
win.setMenu(null)
win.loadURL(url.format({
pathname: path.join(__dirname, "app.html"),
protocol: "file:",
slashes: true
}))
win.webContents.on('did-finish-load', () => {
setTimeout(() => win.send('url', '/changelog'), 0)
});
const handleRedirect = (e, url) => {
if(url != win.webContents.getURL()) {
e.preventDefault()
shell.openExternal(url)
}
}
const handleRedirect = (e, url) => {
if(url != win.webContents.getURL()) {
e.preventDefault()
shell.openExternal(url)
}
}
win.webContents.on('will-navigate', handleRedirect)
win.webContents.on('new-window', handleRedirect)
},
},
{
label: __("Bug Report"),
accelerator: "CmdOrCtrl+[",
click: () => {
shell.openExternal('https://github.com/DEgITx/rats-search/issues')
},
},
{
label: __("Donate"),
accelerator: "CmdOrCtrl+*",
click: () => {
const win = new BrowserWindow({
parent: BrowserWindow.getFocusedWindow(),
modal: true,
width: 1000
})
win.setMenu(null)
win.loadURL(url.format({
pathname: path.join(__dirname, "donate.html"),
protocol: "file:",
slashes: true
}))
win.webContents.on('will-navigate', handleRedirect)
win.webContents.on('new-window', handleRedirect)
},
},
{
label: __("Bug Report"),
accelerator: "CmdOrCtrl+[",
click: () => {
shell.openExternal('https://github.com/DEgITx/rats-search/issues')
},
},
{
label: __("Donate"),
accelerator: "CmdOrCtrl+*",
click: () => {
const win = new BrowserWindow({
parent: BrowserWindow.getFocusedWindow(),
modal: true,
width: 1000
})
win.setMenu(null)
win.loadURL(url.format({
pathname: path.join(__dirname, "donate.html"),
protocol: "file:",
slashes: true
}))
const handleRedirect = (e, url) => {
if(url != win.webContents.getURL()) {
if(!url.includes('patreon'))
return
const handleRedirect = (e, url) => {
if(url != win.webContents.getURL()) {
if(!url.includes('patreon'))
return
e.preventDefault()
shell.openExternal(url)
}
}
e.preventDefault()
shell.openExternal(url)
}
}
win.webContents.on('will-navigate', handleRedirect)
win.webContents.on('new-window', handleRedirect)
},
},
{
label: __("Help (Documentation)"),
accelerator: "CmdOrCtrl+?",
click: () => {
shell.openExternal('https://github.com/DEgITx/rats-search/blob/master/docs/MANUAL.md')
},
},
{
label: __("Support (Discussion)"),
accelerator: "CmdOrCtrl+>",
click: () => {
shell.openExternal('https://discord.gg/t9GQtxA')
},
},
{
label: __("About (GitHub)"),
accelerator: "CmdOrCtrl+<",
click: () => {
shell.openExternal('https://github.com/DEgITx/rats-search')
},
}
]
win.webContents.on('will-navigate', handleRedirect)
win.webContents.on('new-window', handleRedirect)
},
},
{
label: __("Help (Documentation)"),
accelerator: "CmdOrCtrl+?",
click: () => {
shell.openExternal('https://github.com/DEgITx/rats-search/blob/master/docs/MANUAL.md')
},
},
{
label: __("Support (Discussion)"),
accelerator: "CmdOrCtrl+>",
click: () => {
shell.openExternal('https://discord.gg/t9GQtxA')
},
},
{
label: __("About (GitHub)"),
accelerator: "CmdOrCtrl+<",
click: () => {
shell.openExternal('https://github.com/DEgITx/rats-search')
},
}
]
});

View File

@ -4,45 +4,45 @@ import path from 'path'
import __, { translationsDir } from '../../app/translation'
export const settingsMenuTemplateFunc = (config, onLanguageChange) => ({
label: __("Settings"),
submenu: [
{
label: __("Main Settings"),
accelerator: "CmdOrCtrl+O",
click: () => {
BrowserWindow.getFocusedWindow().webContents.send('url', '/config')
}
},
{
label: __("Torrents Filters"),
accelerator: "CmdOrCtrl+\\",
click: () => {
BrowserWindow.getFocusedWindow().webContents.send('url', '/filters')
}
},
{
label: __("Language"),
submenu: (() => {
const translations = []
const translationsDirectory = translationsDir()
fs.readdirSync(translationsDirectory).forEach(translation => {
const translationJson = JSON.parse(fs.readFileSync(`${translationsDirectory}/${translation}`, 'utf8'))
const lang = path.basename(translation, '.json')
translations.push({
label: translationJson.nameOriginal,
type: 'checkbox',
checked: config.language === lang,
click: () => {
BrowserWindow.getFocusedWindow().webContents.send('changeLanguage', lang)
config.language = lang
if(onLanguageChange)
onLanguageChange(lang)
console.log('changed translation to:', lang)
}
})
})
return translations
})()
}
]
label: __("Settings"),
submenu: [
{
label: __("Main Settings"),
accelerator: "CmdOrCtrl+O",
click: () => {
BrowserWindow.getFocusedWindow().webContents.send('url', '/config')
}
},
{
label: __("Torrents Filters"),
accelerator: "CmdOrCtrl+\\",
click: () => {
BrowserWindow.getFocusedWindow().webContents.send('url', '/filters')
}
},
{
label: __("Language"),
submenu: (() => {
const translations = []
const translationsDirectory = translationsDir()
fs.readdirSync(translationsDirectory).forEach(translation => {
const translationJson = JSON.parse(fs.readFileSync(`${translationsDirectory}/${translation}`, 'utf8'))
const lang = path.basename(translation, '.json')
translations.push({
label: translationJson.nameOriginal,
type: 'checkbox',
checked: config.language === lang,
click: () => {
BrowserWindow.getFocusedWindow().webContents.send('changeLanguage', lang)
config.language = lang
if(onLanguageChange)
onLanguageChange(lang)
console.log('changed translation to:', lang)
}
})
})
return translations
})()
}
]
});

View File

@ -1,28 +1,28 @@
import { app, BrowserWindow } from "electron";
export const devMenuTemplate = {
label: "Development",
submenu: [
{
label: "Reload",
accelerator: "CmdOrCtrl+R",
click: () => {
BrowserWindow.getFocusedWindow().webContents.reloadIgnoringCache();
}
},
{
label: "Toggle DevTools",
accelerator: "Alt+CmdOrCtrl+I",
click: () => {
BrowserWindow.getFocusedWindow().toggleDevTools();
}
},
{
label: "Quit",
accelerator: "CmdOrCtrl+Q",
click: () => {
app.quit();
}
}
]
label: "Development",
submenu: [
{
label: "Reload",
accelerator: "CmdOrCtrl+R",
click: () => {
BrowserWindow.getFocusedWindow().webContents.reloadIgnoringCache();
}
},
{
label: "Toggle DevTools",
accelerator: "Alt+CmdOrCtrl+I",
click: () => {
BrowserWindow.getFocusedWindow().toggleDevTools();
}
},
{
label: "Quit",
accelerator: "CmdOrCtrl+Q",
click: () => {
app.quit();
}
}
]
};

View File

@ -1,14 +1,14 @@
import __ from '../../app/translation'
export const editMenuTemplateFunc = () => ({
label: __("Edit"),
submenu: [
{ label: __("Undo"), accelerator: "CmdOrCtrl+Z", selector: "undo:" },
{ label: __("Redo"), accelerator: "Shift+CmdOrCtrl+Z", selector: "redo:" },
{ type: "separator" },
{ label: __("Cut"), accelerator: "CmdOrCtrl+X", selector: "cut:" },
{ label: __("Copy"), accelerator: "CmdOrCtrl+C", selector: "copy:" },
{ label: __("Paste"), accelerator: "CmdOrCtrl+V", selector: "paste:" },
{ label: __("Select All"), accelerator: "CmdOrCtrl+A", selector: "selectAll:" }
]
label: __("Edit"),
submenu: [
{ label: __("Undo"), accelerator: "CmdOrCtrl+Z", selector: "undo:" },
{ label: __("Redo"), accelerator: "Shift+CmdOrCtrl+Z", selector: "redo:" },
{ type: "separator" },
{ label: __("Cut"), accelerator: "CmdOrCtrl+X", selector: "cut:" },
{ label: __("Copy"), accelerator: "CmdOrCtrl+C", selector: "copy:" },
{ label: __("Paste"), accelerator: "CmdOrCtrl+V", selector: "paste:" },
{ label: __("Select All"), accelerator: "CmdOrCtrl+A", selector: "selectAll:" }
]
});

View File

@ -4,28 +4,28 @@ import url from "url";
import __ from '../../app/translation'
export const manageMenuTemplateFunc = () => ({
label: __("Manage"),
submenu: [
{
label: __("Downloads"),
accelerator: "CmdOrCtrl+d",
click: () => {
BrowserWindow.getFocusedWindow().webContents.send('url', '/downloads')
},
},
{
label: __("Search"),
accelerator: "CmdOrCtrl+n",
click: () => {
BrowserWindow.getFocusedWindow().webContents.send('url', '/')
},
},
{
label: __("Top"),
accelerator: "CmdOrCtrl+t",
click: () => {
BrowserWindow.getFocusedWindow().webContents.send('url', '/top')
},
}
]
label: __("Manage"),
submenu: [
{
label: __("Downloads"),
accelerator: "CmdOrCtrl+d",
click: () => {
BrowserWindow.getFocusedWindow().webContents.send('url', '/downloads')
},
},
{
label: __("Search"),
accelerator: "CmdOrCtrl+n",
click: () => {
BrowserWindow.getFocusedWindow().webContents.send('url', '/')
},
},
{
label: __("Top"),
accelerator: "CmdOrCtrl+t",
click: () => {
BrowserWindow.getFocusedWindow().webContents.send('url', '/top')
},
}
]
});

View File

@ -2,43 +2,43 @@ const mysql = require('mysql');
const config = require('./config');
const expand = (sphinx) => {
const queryCall = sphinx.query.bind(sphinx)
const queryCall = sphinx.query.bind(sphinx)
sphinx.query = (sql, args, callback) => new Promise((resolve, reject) => {
if(typeof args === 'function' || typeof args === 'undefined')
{
queryCall(sql, (err, res) => {
if(err)
reject(err)
else
resolve(res)
sphinx.query = (sql, args, callback) => new Promise((resolve, reject) => {
if(typeof args === 'function' || typeof args === 'undefined')
{
queryCall(sql, (err, res) => {
if(err)
reject(err)
else
resolve(res)
if(args)
args(err, res)
})
}
else
{
queryCall(sql, args, (err, res) => {
if(err)
reject(err)
else
resolve(res)
if(args)
args(err, res)
})
}
else
{
queryCall(sql, args, (err, res) => {
if(err)
reject(err)
else
resolve(res)
if(callback)
callback(err, res)
})
}
})
if(callback)
callback(err, res)
})
}
})
sphinx.insertValues = (table, values, callback) => new Promise((resolve) => {
sphinx.insertValues = (table, values, callback) => new Promise((resolve) => {
let names = '';
let data = '';
for(const val in values)
{
if(values[val] === null)
continue;
names += '`' + val + '`,';
data += sphinx.escape(values[val]) + ',';
}
@ -46,101 +46,101 @@ const expand = (sphinx) => {
data = data.slice(0, -1)
let query = `INSERT INTO ${table}(${names}) VALUES(${data})`;
queryCall(query, (...responce) => {
if(callback)
callback(...responce)
resolve(...responce)
})
})
if(callback)
callback(...responce)
resolve(...responce)
})
})
sphinx.updateValues = (table, values, whereObject, callback) => new Promise((resolve) => {
let set = ''
for(const val in values)
sphinx.updateValues = (table, values, whereObject, callback) => new Promise((resolve) => {
let set = ''
for(const val in values)
{
if(values[val] === null)
continue;
if(typeof values[val] == 'object')
continue;
if(typeof values[val] == 'object')
continue;
// skip text indexes (manticore bug https://github.com/manticoresoftware/manticoresearch/issues/84)
if(typeof values[val] == 'string')
continue;
// skip text indexes (manticore bug https://github.com/manticoresoftware/manticoresearch/issues/84)
if(typeof values[val] == 'string')
continue;
set += '`' + val + '` = ' + sphinx.escape(values[val]) + ',';
}
if(set.length == 0)
return
set = set.slice(0, -1)
set += '`' + val + '` = ' + sphinx.escape(values[val]) + ',';
}
if(set.length == 0)
return
set = set.slice(0, -1)
let where = ''
for(const w in whereObject)
let where = ''
for(const w in whereObject)
{
if(whereObject[w] === null)
continue;
where += '`' + w + '` = ' + sphinx.escape(whereObject[w]) + ' and';
}
if(where.length == 0)
return
where = where.slice(0, -3)
where += '`' + w + '` = ' + sphinx.escape(whereObject[w]) + ' and';
}
if(where.length == 0)
return
where = where.slice(0, -3)
const query = `UPDATE ${table} SET ${set} WHERE ${where}`;
queryCall(query, (...responce) => {
if(callback)
callback(...responce)
resolve(...responce)
})
})
const query = `UPDATE ${table} SET ${set} WHERE ${where}`;
queryCall(query, (...responce) => {
if(callback)
callback(...responce)
resolve(...responce)
})
})
return sphinx
return sphinx
}
const pool = () => {
let sphinx = mysql.createPool({
connectionLimit: config.sphinx.connectionLimit,
host : config.sphinx.host,
port : config.sphinx.port
});
return expand(sphinx)
let sphinx = mysql.createPool({
connectionLimit: config.sphinx.connectionLimit,
host : config.sphinx.host,
port : config.sphinx.port
});
return expand(sphinx)
}
let mysqlSingle;
const single = (callback) => {
mysqlSingle = mysql.createConnection({
host : config.sphinx.host,
port : config.sphinx.port
});
mysqlSingle = mysql.createConnection({
host : config.sphinx.host,
port : config.sphinx.port
});
let promiseResolve;
const connectionPromise = new Promise((resolve) => {
promiseResolve = resolve
})
mysqlSingle.waitConnection = () => connectionPromise;
let promiseResolve;
const connectionPromise = new Promise((resolve) => {
promiseResolve = resolve
})
mysqlSingle.waitConnection = () => connectionPromise;
mysqlSingle.connect((mysqlError) => {
if (mysqlError) {
console.error('error connecting: ' + mysqlError.stack);
return;
}
mysqlSingle.connect((mysqlError) => {
if (mysqlError) {
console.error('error connecting: ' + mysqlError.stack);
return;
}
if(callback)
callback(mysqlSingle)
if(callback)
callback(mysqlSingle)
promiseResolve(mysqlSingle)
});
promiseResolve(mysqlSingle)
});
mysqlSingle.on('error', (err) => {
console.log('db error', err);
if(err.code === 'PROTOCOL_CONNECTION_LOST') { // Connection to the MySQL server is usually
mysqlSingle = undefined
single(); // lost due to either server restart, or a
} else { // connnection idle timeout (the wait_timeout
throw err; // server variable configures this)
}
});
mysqlSingle.on('error', (err) => {
console.log('db error', err);
if(err.code === 'PROTOCOL_CONNECTION_LOST') { // Connection to the MySQL server is usually
mysqlSingle = undefined
single(); // lost due to either server restart, or a
} else { // connnection idle timeout (the wait_timeout
throw err; // server variable configures this)
}
});
mysqlSingle = expand(mysqlSingle)
return mysqlSingle
mysqlSingle = expand(mysqlSingle)
return mysqlSingle
}
module.exports = {pool, single}

View File

@ -127,7 +127,7 @@ class p2p {
// all ok don't need to start any ssh tunnels
if(isAvailable)
{
{
console.log('tcp p2p port is reachable - all ok')
return;
}
@ -152,7 +152,7 @@ class p2p {
this.externalPeers = []
return
}
console.log('ssh tunnel success, redirect peers to ssh')
this.p2pStatus = 1
@ -224,7 +224,7 @@ class p2p {
delete callbacks[message.id];
}
});
const emit = (type, data, callback) => {
const id = Math.random().toString(36).substring(5)
if(callback)
@ -301,7 +301,7 @@ class p2p {
console.log('close peer connection', address)
}
})
socket.on('error', (err) => {})
socket.connect(address.port, address.address);

View File

@ -22,16 +22,16 @@ const socketMessages = {}
io.on('connection', (socket) =>
{
for(const message in socketMessages)
{
socket.on(message, socketMessages[message])
}
for(const message in socketMessages)
{
socket.on(message, socketMessages[message])
}
})
sphinx = startSphinx(() => {
dbPatcher(() => {
spider = spiderCall((...data) => io.sockets.emit(...data), (message, callback) => {
socketMessages[message] = callback
}, path.resolve(packageJson.serverDataDirectory), packageJson.version, 'production')
}, null, sphinx)
dbPatcher(() => {
spider = spiderCall((...data) => io.sockets.emit(...data), (message, callback) => {
socketMessages[message] = callback
}, path.resolve(packageJson.serverDataDirectory), packageJson.version, 'production')
}, null, sphinx)
}, path.resolve(packageJson.serverDataDirectory), () => {})

View File

@ -3,12 +3,12 @@
* @param {Array} a items An array containing the items.
*/
module.exports = function shuffle(a) {
let j, x, i;
for (i = a.length - 1; i > 0; i--) {
j = Math.floor(Math.random() * (i + 1));
x = a[i];
a[i] = a[j];
a[j] = x;
}
return a
let j, x, i;
for (i = a.length - 1; i > 0; i--) {
j = Math.floor(Math.random() * (i + 1));
x = a[i];
a[i] = a[j];
a[j] = x;
}
return a
}

View File

@ -1,7 +1,7 @@
const path = require("path");
let env
try{
env = require("env");
env = require("env");
} catch(e){}
const appPath = require('./electronAppPath')
const fs = require('fs')
@ -10,7 +10,7 @@ const { spawn, exec } = require('child_process')
const appConfig = require('./config')
const writeSphinxConfig = (path, dbPath) => {
let config = `
let config = `
index torrents
{
type = rt
@ -92,115 +92,115 @@ const writeSphinxConfig = (path, dbPath) => {
}
`;
// clear dir in test env
if(env && env.name === 'test')
{
if (fs.existsSync(`${dbPath}/database`)) {
fs.readdirSync(`${dbPath}/database`).forEach(function(file, index){
const curPath = `${dbPath}/database` + "/" + file;
if (!fs.lstatSync(curPath).isDirectory()) {
fs.unlinkSync(curPath);
}
});
// clear dir in test env
if(env && env.name === 'test')
{
if (fs.existsSync(`${dbPath}/database`)) {
fs.readdirSync(`${dbPath}/database`).forEach(function(file, index){
const curPath = `${dbPath}/database` + "/" + file;
if (!fs.lstatSync(curPath).isDirectory()) {
fs.unlinkSync(curPath);
}
});
fs.readdirSync(path).forEach(function(file, index){
if(!file.startsWith('binlog'))
return;
const curPath = path + "/" + file;
if (!fs.lstatSync(curPath).isDirectory()) {
fs.unlinkSync(curPath);
}
});
}
}
fs.readdirSync(path).forEach(function(file, index){
if(!file.startsWith('binlog'))
return;
const curPath = path + "/" + file;
if (!fs.lstatSync(curPath).isDirectory()) {
fs.unlinkSync(curPath);
}
});
}
}
// clean query.log because it too large and don't consist any good info
if(fs.existsSync(`${path}/query.log`))
{
fs.unlinkSync(`${path}/query.log`)
}
// clean query.log because it too large and don't consist any good info
if(fs.existsSync(`${path}/query.log`))
{
fs.unlinkSync(`${path}/query.log`)
}
let isInitDb = false
let isInitDb = false
if (!fs.existsSync(`${dbPath}/database`)){
fs.mkdirSync(`${dbPath}/database`);
isInitDb = true
}
if (!fs.existsSync(`${dbPath}/database`)){
fs.mkdirSync(`${dbPath}/database`);
isInitDb = true
}
if(/^win/.test(process.platform))
config = iconv.encode(config, 'win1251')
if(/^win/.test(process.platform))
config = iconv.encode(config, 'win1251')
fs.writeFileSync(`${path}/sphinx.conf`, config)
console.log(`writed sphinx config to ${path}`)
console.log('db path:', dbPath)
fs.writeFileSync(`${path}/sphinx.conf`, config)
console.log(`writed sphinx config to ${path}`)
console.log('db path:', dbPath)
return {isInitDb}
return {isInitDb}
}
module.exports = (callback, dataDirectory, onClose) => {
const sphinxPath = path.resolve(appPath('searchd'))
console.log('Sphinx Path:', sphinxPath)
const sphinxPath = path.resolve(appPath('searchd'))
console.log('Sphinx Path:', sphinxPath)
const sphinxConfigDirectory = dataDirectory
appConfig['dbPath'] = appConfig.dbPath && appConfig.dbPath.length > 0 ? appConfig.dbPath : sphinxConfigDirectory;
// on portable dir can move database directory
if(!fs.existsSync(appConfig.dbPath) && fs.existsSync(sphinxConfigDirectory))
{
appConfig['dbPath'] = sphinxConfigDirectory
}
const sphinxConfigDirectory = dataDirectory
appConfig['dbPath'] = appConfig.dbPath && appConfig.dbPath.length > 0 ? appConfig.dbPath : sphinxConfigDirectory;
// on portable dir can move database directory
if(!fs.existsSync(appConfig.dbPath) && fs.existsSync(sphinxConfigDirectory))
{
appConfig['dbPath'] = sphinxConfigDirectory
}
const { isInitDb } = writeSphinxConfig(sphinxConfigDirectory, appConfig.dbPath)
const { isInitDb } = writeSphinxConfig(sphinxConfigDirectory, appConfig.dbPath)
const config = `${sphinxConfigDirectory}/sphinx.conf`
const options = ['--config', config]
if(!(/^win/.test(process.platform)))
{
options.push('--nodetach')
}
const sphinx = spawn(sphinxPath, options)
// remeber initizalizing of db
sphinx.isInitDb = isInitDb
sphinx.directoryPath = appConfig.dbPath
sphinx.directoryPathDb = appConfig.dbPath + '/database'
const config = `${sphinxConfigDirectory}/sphinx.conf`
const options = ['--config', config]
if(!(/^win/.test(process.platform)))
{
options.push('--nodetach')
}
const sphinx = spawn(sphinxPath, options)
// remeber initizalizing of db
sphinx.isInitDb = isInitDb
sphinx.directoryPath = appConfig.dbPath
sphinx.directoryPathDb = appConfig.dbPath + '/database'
const optimizeResolvers = {}
const optimizeResolvers = {}
sphinx.stdout.on('data', (data) => {
console.log(`sphinx: ${data}`)
if (data.includes('accepting connections')) {
console.log('catched sphinx start')
if(callback)
callback()
}
sphinx.stdout.on('data', (data) => {
console.log(`sphinx: ${data}`)
if (data.includes('accepting connections')) {
console.log('catched sphinx start')
if(callback)
callback()
}
const checkOptimized = String(data).match(/index ([\w]+): optimized/)
if(checkOptimized)
{
if(optimizeResolvers[checkOptimized[1]])
{
console.log('resolve optimizer', checkOptimized[1])
optimizeResolvers[checkOptimized[1]]()
}
}
})
const checkOptimized = String(data).match(/index ([\w]+): optimized/)
if(checkOptimized)
{
if(optimizeResolvers[checkOptimized[1]])
{
console.log('resolve optimizer', checkOptimized[1])
optimizeResolvers[checkOptimized[1]]()
}
}
})
sphinx.on('close', (code, signal) => {
console.log(`sphinx closed with code ${code} and signal ${signal}`)
if(onClose)
onClose()
})
sphinx.on('close', (code, signal) => {
console.log(`sphinx closed with code ${code} and signal ${signal}`)
if(onClose)
onClose()
})
sphinx.stop = () => {
console.log('sphinx closing...')
exec(`"${sphinxPath}" --config "${config}" --stopwait`)
}
sphinx.stop = () => {
console.log('sphinx closing...')
exec(`"${sphinxPath}" --config "${config}" --stopwait`)
}
sphinx.waitOptimized = (table) => new Promise((resolve) => {
optimizeResolvers[table] = () => {
delete optimizeResolvers[table];
resolve()
}
})
sphinx.waitOptimized = (table) => new Promise((resolve) => {
optimizeResolvers[table] = () => {
delete optimizeResolvers[table];
resolve()
}
})
return sphinx
return sphinx
}

File diff suppressed because it is too large Load Diff

View File

@ -13,7 +13,7 @@ const startSSH = (port, host, user, password, callback) => {
if(tryies-- <= 0)
{
if(callback)
callback(false)
callback(false)
return
}

View File

@ -3,184 +3,184 @@ const EventEmitter = require('events');
const forBigTable = require('./forBigTable')
module.exports = class P2PStore extends EventEmitter {
constructor(p2p, sphinx)
{
super()
this.id = 0
this.synchronized = false
constructor(p2p, sphinx)
{
super()
this.id = 0
this.synchronized = false
console.log('connect p2p store...')
this.p2p = p2p
this.sphinx = sphinx
console.log('connect p2p store...')
this.p2p = p2p
this.sphinx = sphinx
this.sphinx.query("SELECT MAX(`id`) as mx from store", (err, rows) => {
this.sphinx.query("SELECT MAX(`id`) as mx from store", (err, rows) => {
if(err)
return
if(rows[0] && rows[0].mx >= 1)
this.id = rows[0].mx;
this.id = rows[0].mx;
console.log('store db index', this.id)
console.log('store db index', this.id)
let lock = false
this.p2p.events.on('peer', () => {
if(lock)
return
lock = true
setTimeout(() => this.sync(), 1000)
})
let lock = false
this.p2p.events.on('peer', () => {
if(lock)
return
lock = true
setTimeout(() => this.sync(), 1000)
})
})
this.p2p.on('dbStore', (record) => {
if(!record || record.id - 1 !== this.id)
{
console.log('out of range peerdb store', record.id)
return
}
this.p2p.on('dbStore', (record) => {
if(!record || record.id - 1 !== this.id)
{
console.log('out of range peerdb store', record.id)
return
}
this._syncRecord(record, () => {
// redirect other peers that record are stored
this.p2p.emit('dbStore', record)
})
})
this._syncRecord(record, () => {
// redirect other peers that record are stored
this.p2p.emit('dbStore', record)
})
})
this.p2p.on('dbSync', ({id} = {}, callback) => {
console.log('ask to sync db from', id, 'version')
if(typeof id === 'undefined' || id >= this.id)
{
callback(false)
return
}
this.p2p.on('dbSync', ({id} = {}, callback) => {
console.log('ask to sync db from', id, 'version')
if(typeof id === 'undefined' || id >= this.id)
{
callback(false)
return
}
// back
this.sphinx.query(`select * from store where id > ${id}`, (err, records) => {
if(err)
{
console.log(err)
return
}
// back
this.sphinx.query(`select * from store where id > ${id}`, (err, records) => {
if(err)
{
console.log(err)
return
}
if(records.length > 0)
callback({
records,
index: this.id
})
})
})
}
if(records.length > 0)
callback({
records,
index: this.id
})
})
})
}
/**
 * Synchronise the local replicated store with connected peers.
 * Broadcasts a `dbSync` request carrying our current record index; a peer
 * that holds newer records answers with a batch (see the `dbSync` handler),
 * and we keep re-requesting from that same peer while it still has more.
 */
sync()
{
	console.log('sync db on version', this.id)
	const processSync = (data, nil, peer) => {
		// peer had nothing newer (or refused the sync with `false`)
		if(!data || !data.records)
			return

		const oldIndex = this.id
		data.records.forEach(record => this._syncRecord(record))

		// peer also can contain another part of store records, try to sync them all
		if(data.index >= 0
			&& oldIndex < this.id // last sync update of store must be successful, otherwise no point to try sync db from this peer
			&& this.id < data.index)
		{
			console.log('continue sync store from', this.id, 'index', 'peer', peer.peerId)
			peer.emit('dbSync', {id: this.id}, processSync)
		}
	}
	this.p2p.emit('dbSync', {id: this.id}, processSync)
	// NOTE(review): flag is raised immediately, before any async replies
	// arrive — presumably intentional (best-effort sync); confirm.
	this.synchronized = true
}
_syncRecord(record, callback)
{
if(!record)
return
_syncRecord(record, callback)
{
if(!record)
return
if(!record.id)
return
if(!record.id)
return
if(record.id <= this.id)
return
if(record.id <= this.id)
return
if(typeof record.data !== 'object')
record.data = JSON.parse(record.data)
if(typeof record.data !== 'object')
record.data = JSON.parse(record.data)
// check hash
if(objectHash(record.data) !== record.hash)
{
console.log('wrong hash for sync peerdb')
return
}
// check hash
if(objectHash(record.data) !== record.hash)
{
console.log('wrong hash for sync peerdb')
return
}
// set myself to false
record.myself = false
// set myself to false
record.myself = false
// push to db
console.log('sync peerdb record', record.id)
this._pushToDb(record)
this.id = record.id
// push to db
console.log('sync peerdb record', record.id)
this._pushToDb(record)
this.id = record.id
// redirect to next
if(callback)
callback()
}
// redirect to next
if(callback)
callback()
}
/**
 * Persist one store record into the sphinx `store` table and emit a local
 * `store` event. The optional `storeIndex` column is written only when the
 * record carries an index (`value.index` or `value.data._index`).
 * @param {Object} value - record {id, hash, peerId|peerid, data, index?, ...}
 * @param {Function} [callback] - invoked after a successful insert
 */
_pushToDb(value, callback)
{
	const data = this.sphinx.escape(JSON.stringify(value.data))
	// NOTE(review): `id`, `hash` and `peerId` are interpolated into the SQL
	// without escaping (only `data` and the index go through sphinx.escape).
	// They appear to be internally generated, but verify they can never carry
	// attacker-controlled text.
	this.sphinx.query(
		`insert into store(id, hash, peerId, data` + (value.index || value.data._index ? ', storeIndex' : '') + `)
		values('${value.id}', '${value.hash}', '${value.peerId || value.peerid}', ${data}` + (value.index || value.data._index ? ',' + this.sphinx.escape(value.index || value.data._index) : '') + ')',
		(err) => {
			if(err)
			{
				console.log(err)
				return
			}

			if(callback)
				callback()
		})
	// fired right after issuing the query, i.e. before the insert completes
	this.emit('store', value)
}
store(obj)
{
if(!this.synchronized)
{
console.log('cant store item on unsync db')
return false
}
store(obj)
{
if(!this.synchronized)
{
console.log('cant store item on unsync db')
return false
}
// clean temp from object
const temp = obj._temp
delete obj._temp
// clean temp from object
const temp = obj._temp
delete obj._temp
const value = {
id: ++this.id,
hash: objectHash(obj),
data: obj,
index: obj._index,
peerId: this.p2p.peerId,
myself: true,
temp
}
const value = {
id: ++this.id,
hash: objectHash(obj),
data: obj,
index: obj._index,
peerId: this.p2p.peerId,
myself: true,
temp
}
console.log('store object', value.id)
console.log('store object', value.id)
this._pushToDb(value, () => {
// store record
this.p2p.emit('dbStore', value)
})
this._pushToDb(value, () => {
// store record
this.p2p.emit('dbStore', value)
})
return true
}
return true
}
async find(index)
{
const records = []
await forBigTable(this.sphinx, 'store', (record) => records.push(record), null, 1000, `and match(${this.sphinx.escape(index)})`)
return records.map( ({data, peerid}) => Object.assign(JSON.parse(data), { _peerId: peerid }) )
}
async find(index)
{
const records = []
await forBigTable(this.sphinx, 'store', (record) => records.push(record), null, 1000, `and match(${this.sphinx.escape(index)})`)
return records.map( ({data, peerid}) => Object.assign(JSON.parse(data), { _peerId: peerid }) )
}
}

View File

@ -5,85 +5,85 @@ const glob = require('glob')
const path = require('path')
module.exports = {
mode: 'development',
//mode: 'production',
entry: path.resolve("src/app/index.js"),
output: {
path: path.resolve('web'),
},
module: {
rules: [
{
test: /\.js$/,
exclude: /node_modules/,
use: ["babel-loader"]
},
{
test: /\.css$/,
use: ["style-loader", "css-loader"]
},
{
test: /\.(?:ico|gif|png|jpg|jpeg|webp)$/,
use: ['url-loader']
}
]
},
plugins: [
new HtmlWebpackPlugin({
inject: true,
template: 'app/app.html',
minify: {
removeComments: true,
collapseWhitespace: true,
removeRedundantAttributes: true,
useShortDoctype: true,
removeEmptyAttributes: true,
removeStyleLinkTypeAttributes: true,
keepClosingSlash: true,
minifyJS: true,
minifyCSS: true,
minifyURLs: true,
},
}),
new webpack.DefinePlugin({WEB: true}),
// Generate a service worker script that will precache, and keep up to date,
// the HTML & assets that are part of the Webpack build.
new SWPrecacheWebpackPlugin({
// By default, a cache-busting query parameter is appended to requests
// used to populate the caches, to ensure the responses are fresh.
// If a URL is already hashed by Webpack, then there is no concern
// about it being stale, and the cache-busting can be skipped.
dontCacheBustUrlsMatching: /\.\w{8}\./,
filename: 'service-worker.js',
logger(message) {
if (message.indexOf('Total precache size is') === 0) {
// This message occurs for every build and is a bit too noisy.
return;
}
if (message.indexOf('Skipping static resource') === 0) {
// This message obscures real errors so we ignore it.
// https://github.com/facebookincubator/create-react-app/issues/2612
return;
}
console.log(message);
},
minify: true,
// For unknown URLs, fallback to the index page
navigateFallback: 'index.html',
// Ignores URLs starting from /__ (useful for Firebase):
// https://github.com/facebookincubator/create-react-app/issues/2237#issuecomment-302693219
navigateFallbackWhitelist: [/^(?!\/__).*/],
// Don't precache sourcemaps (they're large) and build asset manifest:
staticFileGlobsIgnorePatterns: [/\.map$/, /asset-manifest\.json$/],
mergeStaticsConfig: true,
staticFileGlobs: glob.sync('public/images/**/*.*').concat(glob.sync('public/sounds/**/*.*')),
stripPrefix: 'public/',
}),
],
node: {
dgram: 'empty',
fs: 'empty',
net: 'empty',
tls: 'empty',
},
mode: 'development',
//mode: 'production',
entry: path.resolve("src/app/index.js"),
output: {
path: path.resolve('web'),
},
module: {
rules: [
{
test: /\.js$/,
exclude: /node_modules/,
use: ["babel-loader"]
},
{
test: /\.css$/,
use: ["style-loader", "css-loader"]
},
{
test: /\.(?:ico|gif|png|jpg|jpeg|webp)$/,
use: ['url-loader']
}
]
},
plugins: [
new HtmlWebpackPlugin({
inject: true,
template: 'app/app.html',
minify: {
removeComments: true,
collapseWhitespace: true,
removeRedundantAttributes: true,
useShortDoctype: true,
removeEmptyAttributes: true,
removeStyleLinkTypeAttributes: true,
keepClosingSlash: true,
minifyJS: true,
minifyCSS: true,
minifyURLs: true,
},
}),
new webpack.DefinePlugin({WEB: true}),
// Generate a service worker script that will precache, and keep up to date,
// the HTML & assets that are part of the Webpack build.
new SWPrecacheWebpackPlugin({
// By default, a cache-busting query parameter is appended to requests
// used to populate the caches, to ensure the responses are fresh.
// If a URL is already hashed by Webpack, then there is no concern
// about it being stale, and the cache-busting can be skipped.
dontCacheBustUrlsMatching: /\.\w{8}\./,
filename: 'service-worker.js',
logger(message) {
if (message.indexOf('Total precache size is') === 0) {
// This message occurs for every build and is a bit too noisy.
return;
}
if (message.indexOf('Skipping static resource') === 0) {
// This message obscures real errors so we ignore it.
// https://github.com/facebookincubator/create-react-app/issues/2612
return;
}
console.log(message);
},
minify: true,
// For unknown URLs, fallback to the index page
navigateFallback: 'index.html',
// Ignores URLs starting from /__ (useful for Firebase):
// https://github.com/facebookincubator/create-react-app/issues/2237#issuecomment-302693219
navigateFallbackWhitelist: [/^(?!\/__).*/],
// Don't precache sourcemaps (they're large) and build asset manifest:
staticFileGlobsIgnorePatterns: [/\.map$/, /asset-manifest\.json$/],
mergeStaticsConfig: true,
staticFileGlobs: glob.sync('public/images/**/*.*').concat(glob.sync('public/sounds/**/*.*')),
stripPrefix: 'public/',
}),
],
node: {
dgram: 'empty',
fs: 'empty',
net: 'empty',
tls: 'empty',
},
};

View File

@ -3,12 +3,12 @@ const webpack = require('webpack');
// One-shot build script: run webpack with the imported config and report
// the outcome on the console.
let compiler = webpack(config);
compiler.run((err, stats) => {
	// fatal webpack error (bad config, unhandled exception) — rethrow the
	// original Error instead of wrapping it, so its stack trace survives
	if(err)
		throw err

	if(stats.compilation.errors && stats.compilation.errors.length > 0)
		console.error('compilation errors', stats.compilation.errors)
	else
		console.log('successfully built web version')
})