Merge branch 'master' into services

Alexey Kasyanchuk, 2018-08-14 09:22:12 +03:00
commit b4a17bc090
27 changed files with 404 additions and 213 deletions

========

@@ -11,6 +11,7 @@
         "react"
     ],
     "rules": {
-        "indent": ["error", "tab"]
+        "indent": ["error", "tab"],
+        "space-infix-ops": ["error", {"int32Hint": false}]
     }
 }
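For reference, space-infix-ops enforces spacing around binary operators, which is what drives the many `a*b` → `a * b` touch-ups throughout this merge; with int32Hint set to false, even the `x|0` int-coercion shorthand must be spaced. A minimal sketch of what the rule flags:

const value = 7.9
const bad = 60000*15    // flagged: missing spaces around '*'
const coerced = value|0 // also flagged, since int32Hint is false
const good = 60000 * 15 // passes
const fine = value | 0  // passes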

======== package-lock.json (generated; 33 lines changed)

@ -1,6 +1,6 @@
{ {
"name": "rats-search", "name": "rats-search",
"version": "0.27.0", "version": "0.28.0",
"lockfileVersion": 1, "lockfileVersion": 1,
"requires": true, "requires": true,
"dependencies": { "dependencies": {
@ -10571,6 +10571,11 @@
"integrity": "sha1-EaBgVotnM5REAz0BJaYaINVk+zQ=", "integrity": "sha1-EaBgVotnM5REAz0BJaYaINVk+zQ=",
"dev": true "dev": true
}, },
"is-running": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/is-running/-/is-running-2.1.0.tgz",
"integrity": "sha1-MKc/9cw4VOT8JUkICen1q/jeCeA="
},
"is-scoped": { "is-scoped": {
"version": "1.0.0", "version": "1.0.0",
"resolved": "https://registry.npmjs.org/is-scoped/-/is-scoped-1.0.0.tgz", "resolved": "https://registry.npmjs.org/is-scoped/-/is-scoped-1.0.0.tgz",
@ -10674,6 +10679,17 @@
"requires": { "requires": {
"node-fetch": "^1.0.1", "node-fetch": "^1.0.1",
"whatwg-fetch": ">=0.10.0" "whatwg-fetch": ">=0.10.0"
},
"dependencies": {
"node-fetch": {
"version": "1.7.3",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-1.7.3.tgz",
"integrity": "sha512-NhZ4CsKx7cYm2vSrBAr2PvFOe6sWDf0UYLRqA6svUYg7+/TSfVAu49jYC4BvQ4Sms9SZgdqGBgroqfDhJdTyKQ==",
"requires": {
"encoding": "^0.1.11",
"is-stream": "^1.0.1"
}
}
} }
}, },
"isstream": { "isstream": {
@ -11912,9 +11928,9 @@
} }
}, },
"mime": { "mime": {
"version": "2.2.0", "version": "2.3.1",
"resolved": "https://registry.npmjs.org/mime/-/mime-2.2.0.tgz", "resolved": "https://registry.npmjs.org/mime/-/mime-2.3.1.tgz",
"integrity": "sha512-0Qz9uF1ATtl8RKJG4VRfOymh7PyEor6NbrI/61lRfuRe4vx9SNATrvAeTj2EWVRKjEQGskrzWkJBBY5NbaVHIA==" "integrity": "sha512-OEUllcVoydBHGN1z84yfQDimn58pZNNNXgZlHXSboxMlFvgI6MXSWpWKpFRra7H1HxpVhHTkrghfRW49k6yjeg=="
}, },
"mime-db": { "mime-db": {
"version": "1.30.0", "version": "1.30.0",
@ -12413,15 +12429,6 @@
"integrity": "sha1-VfuN62mQcHB/tn+RpGDwRIKUx30=", "integrity": "sha1-VfuN62mQcHB/tn+RpGDwRIKUx30=",
"dev": true "dev": true
}, },
"node-fetch": {
"version": "1.7.3",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-1.7.3.tgz",
"integrity": "sha512-NhZ4CsKx7cYm2vSrBAr2PvFOe6sWDf0UYLRqA6svUYg7+/TSfVAu49jYC4BvQ4Sms9SZgdqGBgroqfDhJdTyKQ==",
"requires": {
"encoding": "^0.1.11",
"is-stream": "^1.0.1"
}
},
"node-libs-browser": { "node-libs-browser": {
"version": "2.1.0", "version": "2.1.0",
"resolved": "https://registry.npmjs.org/node-libs-browser/-/node-libs-browser-2.1.0.tgz", "resolved": "https://registry.npmjs.org/node-libs-browser/-/node-libs-browser-2.1.0.tgz",

========

@@ -125,9 +125,11 @@
         "google": "^2.1.0",
         "iconv-lite": "^0.4.19",
         "ipaddr.js": "^1.5.4",
+        "is-running": "^2.1.0",
         "json-socket": "^0.3.0",
         "lodash": "^4.17.5",
         "material-ui": "^0.20.0",
+        "mime": "^2.3.1",
         "moment": "^2.20.1",
         "mysql": "^2.15.0",
         "nat-upnp": "^1.1.1",

========

@@ -151,19 +151,19 @@ class App extends Component {
             if(!files || files.length == 0)
                 return
-            torrentSocket.emit('dropTorrents', Array.from(files).filter(file => file.type == 'application/x-bittorrent').map(file => file.path))
+            torrentSocket.emit('dropTorrents', Array.from(files).filter(file => (file.type == 'application/x-bittorrent' || file.type == '')).map(file => file.path))
         }
         document.addEventListener('dragover', (event) => {
             event.stopPropagation();
             event.preventDefault();
             event.dataTransfer.dropEffect = 'copy'; // Explicitly show this is a copy.
         }, false);
         document.addEventListener('drop', (event) => {
             event.stopPropagation();
             event.preventDefault();
             processTorrents(event.dataTransfer.files); // FileList object.
         }, false);
         window.router()
         appReady = true;

========

@@ -249,7 +249,7 @@ export default class ConfigPage extends Page {
                 null
             }
             {
                 this.removed > 0 && !this.toRemove && !this.toRemoveProbably
                 ?
                 <div style={{color: 'purple'}}>{this.realRemove ? __('removing') : __('calculation')}...: {this.removed}{this.removedMax > 0 ? '/' + this.removedMax : ''}</div>
                 :

========

@@ -58,13 +58,13 @@ class PagesPie extends Component {
         {
             this.pie.map(({Page, params}, index) => {
                 let focus = false;
-                if (index === this.pie.length-1) {
+                if (index === this.pie.length - 1) {
                     focus = true;
                 }
                 return (
                     <Page
                         focused={focus}
-                        closeHandler={() => { index> 0 ? this.close() : null}}
+                        closeHandler={() => { index > 0 ? this.close() : null}}
                         index={index}
                         key={index}
                         ref={index}

========

@@ -287,6 +287,7 @@ class Search extends Component {
                 floatingLabelText={__('What to search?')}
                 fullWidth={true}
                 ref='searchInput'
+                id='searchInput'
                 defaultValue={this.searchValue}
                 errorText={this.searchError}
                 onKeyPress={(e) => {
@@ -335,7 +336,7 @@
                 </Tooltip>
             </div>
-            <RaisedButton style={{marginLeft: '10px'}} label={__('Search')} primary={true} onClick={() =>{
+            <RaisedButton style={{marginLeft: '10px'}} id='search' label={__('Search')} primary={true} onClick={() =>{
                 this.search()
             }} />
         </div>

========

@@ -187,7 +187,7 @@ export default class Torrent extends Component {
     componentDidMount()
     {
         scrollBack()
         this.downloading = (hash) => {
             if(this.props.torrent.hash != hash)
@@ -261,7 +261,8 @@ export default class Torrent extends Component {
         return (
             <div>
                 <ListItem
+                    className='torrentRow'
                     innerDivStyle={{paddingRight: 84}}
                     onClick={(e) => {
                         const link = '/torrent/' + torrent.hash;
@@ -275,7 +276,7 @@ export default class Torrent extends Component {
                             return true;
                         }
                         */
                         window.rememberYOffset = window.pageYOffset
                         window.routerFix()
                         PagesPie.instance().open(TorrentPage, {replace: 'all', hash: torrent.hash, peer: torrent.peer})
                     }}
@@ -284,7 +285,7 @@ export default class Torrent extends Component {
                         if(node)
                             node.onclick = () => { return false }
                     }}>
-                        <span className='break-word' style={{
+                        <span className='break-word torrentName' style={{
                             color: torrent.contentCategory != 'xxx' ? (torrent.peer ? '#5643db' : 'black') : (torrent.peer ? '#9083e2' : 'grey')
                         }}>
                             {torrent.name}

========

@@ -1003,7 +1003,7 @@ module.exports = async ({
         if(peer.info && peer.info.feed)
         {
             if(peer.info.feed > feed.size() // remote list is bigger than ours
                 || (peer.info.feed == feed.size() && peer.info.feedDate > feed.feedDate)) // or equal size but newer
             {
                 peer.emit('feed', null, (remoteFeed) => {
                     if(!remoteFeed)

======== src/background/asyncWait.js (new file)

@@ -0,0 +1 @@
+module.exports = (time) => new Promise((resolve) => setTimeout(resolve, time))
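A usage sketch for the new helper (the tests below require it as src/background/asyncWait); the two-second delay is an arbitrary example value:

const asyncWait = require('./src/background/asyncWait')

async function demo() {
    console.log('waiting...')
    await asyncWait(2000) // resolves after ~2000 ms, with no value
    console.log('done')
}
demo()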

========

@@ -84,9 +84,9 @@ const stringHashCode = (str) => {
     if (str.length === 0)
         return hash;
     for (i = 0; i < str.length; i++) {
         chr = str.charCodeAt(i);
         hash = ((hash << 5) - hash) + chr;
         hash |= 0; // Convert to 32bit integer
     }
     return hash;
 };
@@ -171,8 +171,8 @@ autoUpdater.on('update-downloaded', () => {
 let tray = undefined
 
-app.on("ready", () => {
-    sphinx = startSphinx(() => {
+app.on("ready", async () => {
+    sphinx = await startSphinx(() => {
         mainWindow = createWindow("main", {
             width: 1000,
@@ -367,3 +367,8 @@ rl.on("SIGINT", function () {
 process.on("SIGINT", () => {
     stop()
 });
+
+process.on("exit", () => {
+    if(spider)
+        spider.preventNetworkOnExit = true
+})

========

@@ -3,27 +3,27 @@ var os = require("os");
 //Create function to get CPU information
 function cpuAverage() {
     //Initialise sum of idle and time of cores and fetch CPU info
     let totalIdle = 0, totalTick = 0;
     const cpus = os.cpus();
     //Loop through CPU cores
     for(let i = 0, len = cpus.length; i < len; i++) {
         //Select CPU core
         const cpu = cpus[i];
         //Total up the time in the cores tick
         for(const type in cpu.times) {
             totalTick += cpu.times[type];
         }
         //Total up the idle time of the core
         totalIdle += cpu.times.idle;
     }
     //Return the average Idle and Tick times
     return {idle: totalIdle / cpus.length, total: totalTick / cpus.length};
 }
 //Grab first CPU Measure
@@ -33,16 +33,16 @@ let percentageCPU = 0
 //Set delay for second Measure
 const cpuTimer = setInterval(function() {
     //Grab second Measure
     const endMeasure = cpuAverage();
     //Calculate the difference in idle and total time between the measures
     const idleDifference = endMeasure.idle - startMeasure.idle;
     const totalDifference = endMeasure.total - startMeasure.total;
     //Calculate the average percentage CPU usage
     percentageCPU = 100 - ~~(100 * idleDifference / totalDifference);
     startMeasure = endMeasure
 }, 300);

========

@@ -33,7 +33,7 @@ function generateTid() {
 class Spider extends Emiter {
     constructor(client) {
         super()
-        const options = arguments.length? arguments[0]: {}
+        const options = arguments.length ? arguments[0] : {}
         this.table = new Table(options.tableCaption || 1000)
         this.bootstraps = options.bootstraps || bootstraps
         this.token = new Token()

========

@@ -3,7 +3,7 @@
 module.exports = class {
     constructor() {
         this.generate()
-        const it = setInterval(() => this.generate(), 60000*15)
+        const it = setInterval(() => this.generate(), 60000 * 15)
         it.unref()
     }
@@ -12,6 +12,6 @@ module.exports = class {
     }
     generate() {
-        this.token = new Buffer([parseInt(Math.random()*200), parseInt(Math.random()*200)])
+        this.token = new Buffer([parseInt(Math.random() * 200), parseInt(Math.random() * 200)])
     }
 }

========

@@ -24,7 +24,7 @@ let connectTracker = function(connection) {
     debug('start scrape connection');
     let buffer = new Buffer(16);
-    const transactionId = Math.floor((Math.random()*100000)+1);
+    const transactionId = Math.floor((Math.random() * 100000) + 1);
     buffer.fill(0);

========

@@ -28,6 +28,7 @@ let config = {
     sphinx: {
         host : '127.0.0.1',
         port : 9306,
+        interfacePort: 9312,
         connectionLimit: 10
     },
@@ -107,7 +108,7 @@ const configProxy = new Proxy(config, {
 })
 
 config.load = () => {
-    debug('loading configuration')
+    debug('loading configuration', configPath)
     if(fs.existsSync(configPath))
     {
         debug('found configuration', configPath)
@@ -133,4 +134,9 @@ config.load = () => {
     return configProxy
 }
 
+config.reload = (path) => {
+    configPath = path + '/rats.json'
+    return config.load()
+}
+
 module.exports = configProxy.load()
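A sketch of the new reload hook, which repoints the loader at `<path>/rats.json` and re-reads it; the directory below is hypothetical (the integration tests pass Electron's userData path instead):

const config = require('./src/background/config')

// hypothetical data directory; reload() looks for rats.json inside it
const reloaded = config.reload('/home/user/.config/rats-search')
console.log(reloaded.sphinx.port, reloaded.sphinx.interfacePort)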

========

@@ -12,7 +12,7 @@ const getTorrent = require('./gettorrent')
 const startSphinx = require('./sphinx')
 
-const currentVersion = 5
+const currentVersion = 6
 
 module.exports = async (callback, mainWindow, sphinxApp) => {
@@ -104,6 +104,112 @@ module.exports = async (callback, mainWindow, sphinxApp) => {
     const patch = async (version) => {
         logT('patcher', 'db version', version)
+        const rebuildTorrentsFull = async () => {
+            if(sphinxApp.isExternal)
+            {
+                logTE('patcher', 'this patch is available only on a non-external db')
+                throw new Error('this patch is available only on a non-external db')
+            }
+            let i = 1
+            const torrents = (await sphinx.query("SELECT COUNT(*) AS c FROM torrents"))[0].c
+            let torrentsArray = []
+            let patch = 1
+            await forBigTable(sphinx, 'torrents', async (torrent) => {
+                logT('patcher', 'remember index', torrent.id, torrent.name, '[', i, 'of', torrents, ']')
+                if(patchWindow)
+                    patchWindow.webContents.send('reindex', {field: torrent.name, index: i++, all: torrents, torrent: true})
+                torrentsArray.push(torrent)
+                // keep memory safe
+                if(torrentsArray.length >= 20000)
+                {
+                    fs.writeFileSync(`${sphinxApp.directoryPath}/torrents.patch.${patch++}`, JSON.stringify(torrentsArray, null, 4), 'utf8');
+                    logT('patcher', 'write torrents dump', `${sphinxApp.directoryPath}/torrents.patch.${patch - 1}`)
+                    torrentsArray = []
+                }
+            })
+            // keep the last elements
+            if(torrentsArray.length > 0)
+            {
+                fs.writeFileSync(`${sphinxApp.directoryPath}/torrents.patch.${patch}`, JSON.stringify(torrentsArray, null, 4), 'utf8');
+                logT('patcher', 'write torrents dump', `${sphinxApp.directoryPath}/torrents.patch.${patch}`)
+                torrentsArray = []
+            }
+            else
+            {
+                patch-- // no last patch
+            }
+            // stop sphinx
+            await new Promise((resolve) => {
+                // reopen sphinx
+                sphinx.destroy() // destroy connection
+                sphinxApp.stop(resolve, true)
+            })
+            logT('patcher', 'sphinx stopped for patching')
+            await new Promise((resolve) => {
+                glob(`${sphinxApp.directoryPathDb}/torrents.*`, function (er, files) {
+                    files.forEach(file => {
+                        logT('patcher', 'clear torrents file', file)
+                        fs.unlinkSync(path.resolve(file))
+                    })
+                    resolve()
+                })
+            })
+            logT('patcher', 'cleaned torrents db structure, recreating again')
+            i = 1
+            await new Promise(async (resolve) => {
+                // reopen sphinx
+                sphinxApp = await sphinxApp.start(async () => {
+                    sphinx = await single().waitConnection()
+                    resolve()
+                }) // same args
+            })
+            logT('patcher', 'sphinx restarted, patching db now')
+            for(let k = 1; k <= patch; k++)
+            {
+                torrentsArray = JSON.parse(fs.readFileSync(`${sphinxApp.directoryPath}/torrents.patch.${k}`, 'utf8'))
+                logT('patcher', 'read torrents dump', `${sphinxApp.directoryPath}/torrents.patch.${k}`)
+                await asyncForEach(torrentsArray, async (torrent) => {
+                    logT('patcher', 'update index', torrent.id, torrent.name, '[', i, 'of', torrents, ']')
+                    if(patchWindow)
+                        patchWindow.webContents.send('reindex', {field: torrent.name, index: i++, all: torrents, torrent: true})
+                    torrent.nameIndex = torrent.name
+                    await sphinx.query(`DELETE FROM torrents WHERE id = ${torrent.id}`)
+                    await sphinx.insertValues('torrents', torrent)
+                })
+            }
+            await new Promise((resolve) => {
+                glob(`${sphinxApp.directoryPath}/torrents.patch.*`, function (er, files) {
+                    files.forEach(file => {
+                        logT('patcher', 'clear dump file', file)
+                        fs.unlinkSync(path.resolve(file))
+                    })
+                    resolve()
+                })
+            })
+            torrentsArray = null
+            logT('patcher', 'optimizing torrents')
+            if(patchWindow)
+                patchWindow.webContents.send('optimize', {field: 'torrents'})
+            sphinx.query(`OPTIMIZE INDEX torrents`)
+            await sphinxApp.waitOptimized('torrents')
+        }
         switch(version)
         {
             case 1:
@@ -192,105 +298,15 @@ module.exports = async (callback, mainWindow, sphinxApp) => {
             case 4:
             {
                 openPatchWindow()
-                let i = 1
-                const torrents = (await sphinx.query("SELECT COUNT(*) AS c FROM torrents"))[0].c
-                let torrentsArray = []
-                let patch = 1
-                await forBigTable(sphinx, 'torrents', async (torrent) => {
-                    logT('patcher', 'remember index', torrent.id, torrent.name, '[', i, 'of', torrents, ']')
-                    if(patchWindow)
-                        patchWindow.webContents.send('reindex', {field: torrent.name, index: i++, all: torrents, torrent: true})
-                    torrentsArray.push(torrent)
-                    // keep memory safe
-                    if(torrentsArray.length >= 20000)
-                    {
-                        fs.writeFileSync(`${sphinxApp.directoryPath}/torrents.patch.${patch++}`, JSON.stringify(torrentsArray, null, 4), 'utf8');
-                        logT('patcher', 'write torrents dump', `${sphinxApp.directoryPath}/torrents.patch.${patch-1}`)
-                        torrentsArray = []
-                    }
-                })
-                // keep the last elements
-                if(torrentsArray.length > 0)
-                {
-                    fs.writeFileSync(`${sphinxApp.directoryPath}/torrents.patch.${patch}`, JSON.stringify(torrentsArray, null, 4), 'utf8');
-                    logT('patcher', 'write torrents dump', `${sphinxApp.directoryPath}/torrents.patch.${patch}`)
-                    torrentsArray = []
-                }
-                else
-                {
-                    patch-- // no last patch
-                }
-                // stop sphinx
-                await new Promise((resolve) => {
-                    // reopen sphinx
-                    sphinx.destroy() // destroy connection
-                    sphinxApp.stop(resolve, true)
-                })
-                logT('patcher', 'sphinx stopped for patching')
-                await new Promise((resolve) => {
-                    glob(`${sphinxApp.directoryPathDb}/torrents.*`, function (er, files) {
-                        files.forEach(file => {
-                            logT('patcher', 'clear torrents file', file)
-                            fs.unlinkSync(path.resolve(file))
-                        })
-                        resolve()
-                    })
-                })
-                logT('patcher', 'cleaned torrents db structure, recreating again')
-                i = 1
-                await new Promise((resolve) => {
-                    // reopen sphinx
-                    sphinxApp = sphinxApp.start(async () => {
-                        sphinx = await single().waitConnection()
-                        resolve()
-                    }) // same args
-                })
-                logT('patcher', 'sphinx restarted, patching db now')
-                for(let k = 1; k <= patch; k++)
-                {
-                    torrentsArray = JSON.parse(fs.readFileSync(`${sphinxApp.directoryPath}/torrents.patch.${k}`, 'utf8'))
-                    logT('patcher', 'read torrents dump', `${sphinxApp.directoryPath}/torrents.patch.${k}`)
-                    await asyncForEach(torrentsArray, async (torrent) => {
-                        logT('patcher', 'update index', torrent.id, torrent.name, '[', i, 'of', torrents, ']')
-                        if(patchWindow)
-                            patchWindow.webContents.send('reindex', {field: torrent.name, index: i++, all: torrents, torrent: true})
-                        torrent.nameIndex = torrent.name
-                        await sphinx.query(`DELETE FROM torrents WHERE id = ${torrent.id}`)
-                        await sphinx.insertValues('torrents', torrent)
-                    })
-                }
-                await new Promise((resolve) => {
-                    glob(`${sphinxApp.directoryPath}/torrents.patch.*`, function (er, files) {
-                        files.forEach(file => {
-                            logT('patcher', 'clear dump file', file)
-                            fs.unlinkSync(path.resolve(file))
-                        })
-                        resolve()
-                    })
-                })
-                torrentsArray = null
-                logT('patcher', 'optimizing torrents')
-                if(patchWindow)
-                    patchWindow.webContents.send('optimize', {field: 'torrents'})
-                sphinx.query(`OPTIMIZE INDEX torrents`)
-                await sphinxApp.waitOptimized('torrents')
+                await rebuildTorrentsFull()
                 await setVersion(5)
             }
+            case 5:
+            {
+                openPatchWindow()
+                await rebuildTorrentsFull()
+                await setVersion(6)
+            }
         }
     logT('patcher', 'db patch done')
     sphinx.destroy()
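The core of rebuildTorrentsFull is a dump-and-replay cycle: stream every row with forBigTable, buffer 20000 rows at a time into numbered torrents.patch.N files, recreate the index, then replay each dump. A reduced sketch of that batching pattern, assuming sphinx, forBigTable, fs and a dir variable are in scope as in the patcher above:

let batch = [], n = 0
const flush = () => {
    fs.writeFileSync(`${dir}/torrents.patch.${++n}`, JSON.stringify(batch), 'utf8')
    batch = []
}
await forBigTable(sphinx, 'torrents', async (row) => {
    batch.push(row)
    if (batch.length >= 20000) flush() // keep memory bounded
})
if (batch.length > 0) flush() // write the trailing partial batch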

======== src/background/directoryFilesRecursive.js (new file)

@@ -0,0 +1,24 @@
+const fs = require('fs')
+
+function directoryFilesRecursive (directory, filesList = []) {
+    let files;
+    try {
+        files = fs.readdirSync(directory)
+    } catch(err) {
+        if(err.code !== 'ENOTDIR')
+            throw err
+        else
+            return [directory] // if file, return file
+    }
+    for (const file of files) {
+        const filePath = `${directory}/${file}`
+        if (fs.statSync(filePath).isDirectory()) {
+            directoryFilesRecursive(filePath, filesList)
+        } else {
+            filesList.push(filePath)
+        }
+    }
+    return filesList
+}
+
+module.exports = directoryFilesRecursive
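Usage sketch: the walker flattens a directory tree into a list of file paths, and the ENOTDIR branch means a plain file argument is returned as a one-element list (the /tmp paths here are hypothetical):

const directoryFilesRecursive = require('./src/background/directoryFilesRecursive')

console.log(directoryFilesRecursive('/tmp/torrents'))  // every file under the tree
console.log(directoryFilesRecursive('/tmp/a.torrent')) // ['/tmp/a.torrent']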

======== src/background/portCheck.js (new file)

@@ -0,0 +1,11 @@
+const net = require('net')
+
+module.exports = (port, host = '127.0.0.1') => new Promise((resolve, reject) => {
+    const tester = net.createServer()
+        .once('error', err => (err.code === 'EADDRINUSE' ? resolve(false) : reject(err)))
+        .once('listening', () => tester.once('close', () => resolve(true)).close())
+        .listen({
+            host,
+            port
+        })
+})
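The checker resolves true when a test bind on the port succeeds and false on EADDRINUSE; any other error rejects. A usage sketch with the default sphinx MySQL port:

const portCheck = require('./src/background/portCheck')

portCheck(9306)
    .then(free => console.log(free ? 'port 9306 is free' : 'port 9306 is busy'))
    .catch(err => console.error('probe failed:', err))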

========

@@ -21,9 +21,9 @@ const stringHashCode = (str) => {
     if (str.length === 0)
         return hash;
     for (i = 0; i < str.length; i++) {
         chr = str.charCodeAt(i);
         hash = ((hash << 5) - hash) + chr;
         hash |= 0; // Convert to 32bit integer
     }
     return hash;
 };
@@ -68,14 +68,17 @@ io.on('connection', (socket) =>
     }
 })
 
-sphinx = startSphinx(() => {
-    dbPatcher(() => {
-        spider = new spiderCall((...data) => io.sockets.emit(...data), (message, callback) => {
-            socketMessages[message] = callback
-        }, path.resolve(packageJson.serverDataDirectory), packageJson.version, 'production')
-    }, null, sphinx)
-}, path.resolve(packageJson.serverDataDirectory), () => {})
+const start = async () =>
+{
+    sphinx = await startSphinx(() => {
+        dbPatcher(() => {
+            spider = new spiderCall((...data) => io.sockets.emit(...data), (message, callback) => {
+                socketMessages[message] = callback
+            }, path.resolve(packageJson.serverDataDirectory), packageJson.version, 'production')
+        }, null, sphinx)
+    }, path.resolve(packageJson.serverDataDirectory), () => {})
+}
+start()
 
 var rl = require("readline").createInterface({
     input: process.stdin,
@@ -92,8 +95,12 @@ process.on("SIGINT", () => {
     {
        spider.stop(() => sphinx.stop(() => process.exit()))
     }
-    else
+    else if(sphinx)
     {
        sphinx.stop(() => process.exit())
     }
+    else
+    {
+       process.exit()
+    }
 });

========

@@ -10,8 +10,22 @@ const { spawn, exec } = require('child_process')
 const appConfig = require('./config')
 const findFiles = require('./findFiles')
 const _ = require('lodash')
+const isRunning = require('is-running')
+const portCheck = require('./portCheck')
+
+const findGoodPort = async (port, host) => {
+    while (!(await portCheck(port, host))) {
+        port++
+        logT('sphinx', 'port is busy, listen on', port)
+    }
+    return port
+}
 
-const writeSphinxConfig = (path, dbPath) => {
+const writeSphinxConfig = async (path, dbPath) => {
+    appConfig.sphinx.port = await findGoodPort(appConfig.sphinx.port)
+    appConfig.sphinx.interfacePort = await findGoodPort(appConfig.sphinx.interfacePort)
+    appConfig.sphinx = appConfig.sphinx // self-assignment so the config proxy notices the new ports
+
     let config = `
 index torrents
 {
@@ -37,7 +51,10 @@ const writeSphinxConfig = (path, dbPath) => {
     rt_attr_uint = completed
     rt_attr_timestamp = trackersChecked
     rt_attr_uint = good
     rt_attr_uint = bad
+    ngram_len = 1
+    ngram_chars = U+3000..U+2FA1F
 }
 
 index files
@@ -82,8 +99,8 @@ const writeSphinxConfig = (path, dbPath) => {
 searchd
 {
-    listen = 9312
-    listen = 9306:mysql41
+    listen = 127.0.0.1:${appConfig.sphinx.interfacePort}
+    listen = 127.0.0.1:${appConfig.sphinx.port}:mysql41
     read_timeout = 5
     max_children = 30
     seamless_rotate = 1
@@ -142,8 +159,8 @@ const writeSphinxConfig = (path, dbPath) => {
     return {isInitDb}
 }
 
-module.exports = (callback, dataDirectory, onClose) => {
-    const start = (callback) => {
+module.exports = async (callback, dataDirectory, onClose) => {
+    const start = async (callback) => {
         const sphinxPath = path.resolve(appPath('searchd'))
         logT('sphinx', 'Sphinx Path:', sphinxPath)
@@ -156,7 +173,13 @@ module.exports = (callback, dataDirectory, onClose) => {
             appConfig['dbPath'] = sphinxConfigDirectory
         }
 
-        const { isInitDb } = writeSphinxConfig(sphinxConfigDirectory, appConfig.dbPath)
+        // check for an external sphinx instance we can use
+        const sphinxPid = `${sphinxConfigDirectory}/searchd.pid`
+        const isSphinxExternal = fs.existsSync(sphinxPid) && isRunning(parseInt(fs.readFileSync(sphinxPid)))
+        if(isSphinxExternal)
+            logT('sphinx', `found running sphinx instance in ${sphinxPid}, using it`)
+
+        const { isInitDb } = isSphinxExternal ? {isInitDb: false} : await writeSphinxConfig(sphinxConfigDirectory, appConfig.dbPath)
 
         const config = `${sphinxConfigDirectory}/sphinx.conf`
         const options = ['--config', config]
@@ -164,7 +187,10 @@ module.exports = (callback, dataDirectory, onClose) => {
         {
             options.push('--nodetach')
         }
-        const sphinx = spawn(sphinxPath, options)
+        const sphinx = !isSphinxExternal ? spawn(sphinxPath, options) :
+            {isExternal: true, on: (d,f) => {}, stdout: {on : (d,f)=>{} }}; // stub for an already running instance
         // remember initializing of db
         sphinx.start = start
         sphinx.isInitDb = isInitDb
@@ -202,12 +228,16 @@ module.exports = (callback, dataDirectory, onClose) => {
             }
         })
 
-        sphinx.on('close', (code, signal) => {
-            logT('sphinx', `sphinx closed with code ${code} and signal ${signal}`)
+        const close = () => {
             if(onClose && !sphinx.replaceOnClose) // sometimes we don't want to call the default callback
                 onClose()
             if(sphinx.onClose)
                 sphinx.onClose()
+        }
+        sphinx.on('close', (code, signal) => {
+            logT('sphinx', `sphinx closed with code ${code} and signal ${signal}`)
+            close()
         })
 
         sphinx.stop = (onFinish, replaceFinish) => {
@@ -216,7 +246,14 @@ module.exports = (callback, dataDirectory, onClose) => {
             sphinx.onClose = onFinish
             if(replaceFinish)
                 sphinx.replaceOnClose = true // sometimes we don't want to call the default callback
-            exec(`"${sphinxPath}" --config "${config}" --stopwait`)
+            if (!sphinx.isExternal)
+                exec(`"${sphinxPath}" --config "${config}" --stopwait`)
+            else
+            {
+                logT('sphinx', `ignoring sphinx shutdown because of an external sphinx instance`)
+                close()
+            }
         }
 
         sphinx.waitOptimized = (table) => new Promise((resolve) => {
@@ -227,6 +264,9 @@ module.exports = (callback, dataDirectory, onClose) => {
         })
 
         sphinx.fixDatabase = async () => {
+            if(sphinx.isExternal)
+                return
+
             if(sphinx.fixing)
                 return
             sphinx.fixing = true
@@ -264,12 +304,13 @@ module.exports = (callback, dataDirectory, onClose) => {
             sphinx.fixing = false
-            _.merge(sphinx, sphinx.start(callback));
+            _.merge(sphinx, await sphinx.start(callback));
         }
 
+        if (isSphinxExternal && callback) setTimeout(()=>{logT('sphinx', 'external sphinx signalled');callback()}, 0);
+
         return sphinx
     }
-    return start(callback)
+    return await start(callback)
 }
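The external-instance detection above reduces to a pidfile liveness probe: read searchd.pid and ask is-running whether that process is still alive. A standalone sketch (the pidfile path is hypothetical):

const fs = require('fs')
const isRunning = require('is-running')

const sphinxPid = '/data/sphinx/searchd.pid' // hypothetical location
const externalAlive = fs.existsSync(sphinxPid)
    && isRunning(parseInt(fs.readFileSync(sphinxPid))) // true while the PID is alive
console.log(externalAlive ? 'reusing running searchd' : 'spawning our own searchd')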

========

@@ -31,6 +31,9 @@ const checkInternet = require('./checkInternet')
 const {torrentTypeDetect} = require('../app/content');
 
 const torrentClient = require('./torrentClient')
+const directoryFilesRecursive = require('./directoryFilesRecursive')
+const _ = require('lodash')
+const mime = require('mime');
 
 // Start server
 //server.listen(config.httpPort);
@@ -495,10 +498,10 @@ module.exports = function (send, recive, dataDirectory, version, env)
     }
 
     const insertMetadata = (metadata, infohash, rinfo) => {
-        logT('spider', 'found torrent', metadata.info.name, 'adding it to database');
         const bufferToString = (buffer) => Buffer.isBuffer(buffer) ? buffer.toString() : buffer
+        logT('spider', 'found torrent', bufferToString(metadata.info.name), 'adding it to database');
 
         const hash = infohash.toString('hex');
         let size = metadata.info.length ? metadata.info.length : 0;
         let filesCount = 1;
@@ -595,9 +598,25 @@
     }
 
     recive('dropTorrents', (pathTorrents) => {
-        logT('drop', 'drop torrents and replicate from original')
-        const torrents = pathTorrents.map(path => parseTorrent(fs.readFileSync(path)))
-        torrents.forEach(torrent => insertMetadata(torrent, torrent.infoHashBuffer, {address: '127.0.0.1', port: 666}))
+        logT('drop', 'drop torrents and replicate from original torrent files')
+        const torrents = _.flatten(pathTorrents.map(path => directoryFilesRecursive(path)))
+            .filter(path => mime.getType(path) == 'application/x-bittorrent')
+            .map(path => {
+                try {
+                    return ({
+                        torrent: parseTorrent(fs.readFileSync(path)),
+                        path
+                    })
+                } catch(err) {
+                    logT('drop', 'error on parse torrent:', path)
+                }
+            })
+            .filter(torrent => torrent)
+        torrents.forEach(({torrent, path}) => {
+            insertMetadata(torrent, torrent.infoHashBuffer, {address: '127.0.0.1', port: 666})
+            logT('drop', 'copied torrent to db:', path)
+        })
+        logT('drop', 'torrents finished adding to db')
     })
 
     checkInternet((connected) => {
@@ -814,10 +833,13 @@ module.exports = function (send, recive, dataDirectory, version, env)
             }))
         })
 
-        await Promise.all([
-            saveBootstrapPeers('api.myjson.com', '/bins/1e5rmh'),
-            saveBootstrapPeers('jsonblob.com', '/api/jsonBlob/013a4415-3533-11e8-8290-a901f3cf34aa')
-        ])
+        if(!this.preventNetworkOnExit)
+        {
+            await Promise.all([
+                saveBootstrapPeers('api.myjson.com', '/bins/1e5rmh'),
+                saveBootstrapPeers('jsonblob.com', '/api/jsonBlob/013a4415-3533-11e8-8290-a901f3cf34aa')
+            ])
+        }
     }
 }
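The rewritten dropTorrents pipeline leans on mime@2's getType, which maps a path's extension to a MIME type, so whole directories of mixed files can be dropped and only .torrent files survive the filter. A compact sketch with hypothetical paths:

const mime = require('mime')

const paths = ['/tmp/a.torrent', '/tmp/readme.txt', '/tmp/image.png']
const torrentFiles = paths.filter(p => mime.getType(p) === 'application/x-bittorrent')
console.log(torrentFiles) // ['/tmp/a.torrent']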

========

@@ -8,7 +8,7 @@ const startApplication = function() {
         args: ["."],
         startTimeout: 30000,
         waitTimeout: 30000,
-        quitTimeout: 10000
+        quitTimeout: 15000
     });
     return this.app.start();
 };

========

@@ -9,6 +9,8 @@ describe("application", () => {
     it("check start", async function() {
         const { app } = this
         await app.client.waitForExist('#index-window')
+        // fix realtime config
+        require('../src/background/config').reload(await app.electron.remote.app.getPath('userData'))
     });
 
     //TESTS
//TESTS //TESTS

======== tests/seach.test.js (new file; 44 lines)

@@ -0,0 +1,44 @@
+import { assert } from "chai";
+const asyncWait = require('../src/background/asyncWait')
+
+describe("search", function() {
+    this.timeout(30000);
+
+    it("dht search", async function() {
+        this.timeout(45000);
+        const { app } = this
+        await app.client.waitForExist('#searchInput')
+        await app.client.$('#searchInput').setValue('1413ba1915affdc3de7e1a81d6fdc32ef19395c9')
+        await app.client.click('#search')
+        await app.client.waitForExist('.torrentRow .torrentName')
+        const value = await app.client.$('.torrentRow .torrentName').getText()
+        assert.equal(value, 'Roblox_setup.exe')
+    })
+
+    it("sphinx search", async function() {
+        const { app } = this
+        await app.client.$('#searchInput').setValue('Roblox_setup')
+        await app.client.click('#search')
+        await app.client.waitForExist('.torrentRow .torrentName')
+        const results = (await app.client.$$('.torrentRow .torrentName')).length
+        assert(results >= 1)
+    })
+
+    it("sphinx partial search", async function() {
+        const { app } = this
+        await app.client.$('#searchInput').setValue('Roblo')
+        await app.client.click('#search')
+        await app.client.waitForExist('.torrentRow .torrentName')
+        const results = (await app.client.$$('.torrentRow .torrentName')).length
+        assert(results >= 1)
+    })
+
+    it("magnet search", async function() {
+        const { app } = this
+        await app.client.$('#searchInput').setValue('magnet:?xt=urn:btih:1413ba1915affdc3de7e1a81d6fdc32ef19395c9')
+        await app.client.click('#search')
+        await app.client.waitForExist('.torrentRow .torrentName')
+        const results = (await app.client.$$('.torrentRow .torrentName')).length
+        assert(results == 1)
+    })
+});