Merge branch 'master' into services
commit b4a17bc090
@@ -11,6 +11,7 @@
 "react"
 ],
 "rules": {
-"indent": ["error", "tab"]
+"indent": ["error", "tab"],
+"space-infix-ops": ["error", {"int32Hint": false}]
 }
 }
package-lock.json (generated, 33 changes)
@@ -1,6 +1,6 @@
 {
 "name": "rats-search",
-"version": "0.27.0",
+"version": "0.28.0",
 "lockfileVersion": 1,
 "requires": true,
 "dependencies": {
@@ -10571,6 +10571,11 @@
 "integrity": "sha1-EaBgVotnM5REAz0BJaYaINVk+zQ=",
 "dev": true
 },
+"is-running": {
+"version": "2.1.0",
+"resolved": "https://registry.npmjs.org/is-running/-/is-running-2.1.0.tgz",
+"integrity": "sha1-MKc/9cw4VOT8JUkICen1q/jeCeA="
+},
 "is-scoped": {
 "version": "1.0.0",
 "resolved": "https://registry.npmjs.org/is-scoped/-/is-scoped-1.0.0.tgz",
@@ -10674,6 +10679,17 @@
 "requires": {
 "node-fetch": "^1.0.1",
 "whatwg-fetch": ">=0.10.0"
 },
+"dependencies": {
+"node-fetch": {
+"version": "1.7.3",
+"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-1.7.3.tgz",
+"integrity": "sha512-NhZ4CsKx7cYm2vSrBAr2PvFOe6sWDf0UYLRqA6svUYg7+/TSfVAu49jYC4BvQ4Sms9SZgdqGBgroqfDhJdTyKQ==",
+"requires": {
+"encoding": "^0.1.11",
+"is-stream": "^1.0.1"
+}
+}
+}
 },
 "isstream": {
@@ -11912,9 +11928,9 @@
 }
 },
 "mime": {
-"version": "2.2.0",
-"resolved": "https://registry.npmjs.org/mime/-/mime-2.2.0.tgz",
-"integrity": "sha512-0Qz9uF1ATtl8RKJG4VRfOymh7PyEor6NbrI/61lRfuRe4vx9SNATrvAeTj2EWVRKjEQGskrzWkJBBY5NbaVHIA=="
+"version": "2.3.1",
+"resolved": "https://registry.npmjs.org/mime/-/mime-2.3.1.tgz",
+"integrity": "sha512-OEUllcVoydBHGN1z84yfQDimn58pZNNNXgZlHXSboxMlFvgI6MXSWpWKpFRra7H1HxpVhHTkrghfRW49k6yjeg=="
 },
 "mime-db": {
 "version": "1.30.0",
@@ -12413,15 +12429,6 @@
 "integrity": "sha1-VfuN62mQcHB/tn+RpGDwRIKUx30=",
 "dev": true
 },
-"node-fetch": {
-"version": "1.7.3",
-"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-1.7.3.tgz",
-"integrity": "sha512-NhZ4CsKx7cYm2vSrBAr2PvFOe6sWDf0UYLRqA6svUYg7+/TSfVAu49jYC4BvQ4Sms9SZgdqGBgroqfDhJdTyKQ==",
-"requires": {
-"encoding": "^0.1.11",
-"is-stream": "^1.0.1"
-}
-},
 "node-libs-browser": {
 "version": "2.1.0",
 "resolved": "https://registry.npmjs.org/node-libs-browser/-/node-libs-browser-2.1.0.tgz",
@ -125,9 +125,11 @@
|
||||
"google": "^2.1.0",
|
||||
"iconv-lite": "^0.4.19",
|
||||
"ipaddr.js": "^1.5.4",
|
||||
"is-running": "^2.1.0",
|
||||
"json-socket": "^0.3.0",
|
||||
"lodash": "^4.17.5",
|
||||
"material-ui": "^0.20.0",
|
||||
"mime": "^2.3.1",
|
||||
"moment": "^2.20.1",
|
||||
"mysql": "^2.15.0",
|
||||
"nat-upnp": "^1.1.1",
|
||||
|
@@ -151,7 +151,7 @@ class App extends Component {
 if(!files || files.length == 0)
 return

-torrentSocket.emit('dropTorrents', Array.from(files).filter(file => file.type == 'application/x-bittorrent').map(file => file.path))
+torrentSocket.emit('dropTorrents', Array.from(files).filter(file => (file.type == 'application/x-bittorrent' || file.type == '')).map(file => file.path))
 }

 document.addEventListener('dragover', (event) => {
@@ -287,6 +287,7 @@ class Search extends Component {
 floatingLabelText={__('What to search?')}
 fullWidth={true}
 ref='searchInput'
+id='searchInput'
 defaultValue={this.searchValue}
 errorText={this.searchError}
 onKeyPress={(e) => {
@@ -335,7 +336,7 @@ class Search extends Component {
 </Tooltip>
 </div>

-<RaisedButton style={{marginLeft: '10px'}} label={__('Search')} primary={true} onClick={() =>{
+<RaisedButton style={{marginLeft: '10px'}} id='search' label={__('Search')} primary={true} onClick={() =>{
 this.search()
 }} />
 </div>
@@ -262,6 +262,7 @@ export default class Torrent extends Component {
 return (
 <div>
 <ListItem
+className='torrentRow'
 innerDivStyle={{paddingRight: 84}}
 onClick={(e) => {
 const link = '/torrent/' + torrent.hash;
@@ -284,7 +285,7 @@ export default class Torrent extends Component {
 if(node)
 node.onclick = () => { return false }
 }}>
-<span className='break-word' style={{
+<span className='break-word torrentName' style={{
 color: torrent.contentCategory != 'xxx' ? (torrent.peer ? '#5643db' : 'black') : (torrent.peer ? '#9083e2' : 'grey')
 }}>
 {torrent.name}
src/background/asyncWait.js (new file, 1 line)
@@ -0,0 +1 @@
+module.exports = (time) => new Promise((resolve) => setTimeout(resolve, time))
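Note: asyncWait is a one-line promise wrapper around setTimeout, imported by the new tests at the bottom of this diff. A minimal usage sketch (the require path assumes the caller sits in the repo root):

    const asyncWait = require('./src/background/asyncWait')

    ;(async () => {
        // pause roughly two seconds without blocking the event loop
        await asyncWait(2000)
        console.log('waited 2s')
    })()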
@@ -171,8 +171,8 @@ autoUpdater.on('update-downloaded', () => {

 let tray = undefined

-app.on("ready", () => {
-sphinx = startSphinx(() => {
+app.on("ready", async () => {
+sphinx = await startSphinx(() => {

 mainWindow = createWindow("main", {
 width: 1000,
@@ -367,3 +367,8 @@ rl.on("SIGINT", function () {
 process.on("SIGINT", () => {
 stop()
 });
+
+process.on("exit", () => {
+if(spider)
+spider.preventNetworkOnExit = true
+})
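Note: the new "exit" hook flags the spider with preventNetworkOnExit; the spider.js change further down in this diff checks that flag and skips publishing bootstrap peers over the network during shutdown.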
@@ -28,6 +28,7 @@ let config = {
 sphinx: {
 host : '127.0.0.1',
 port : 9306,
+interfacePort: 9312,
 connectionLimit: 10
 },

@@ -107,7 +108,7 @@ const configProxy = new Proxy(config, {
 })

 config.load = () => {
-debug('loading configuration')
+debug('loading configuration', configPath)
 if(fs.existsSync(configPath))
 {
 debug('finded configuration', configPath)
@@ -133,4 +134,9 @@ config.load = () => {
 return configProxy
 }

+config.reload = (path) => {
+configPath = path + '/rats.json'
+return config.load()
+}
+
 module.exports = configProxy.load()
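Note: config.reload repoints the lookup at <path>/rats.json and re-runs config.load; the test change near the bottom of this diff uses it to pick up the Electron userData directory at runtime. A minimal sketch, with the directory path as an assumption:

    const config = require('./src/background/config')

    // re-read configuration from /tmp/rats-data/rats.json (hypothetical path)
    const reloaded = config.reload('/tmp/rats-data')
    console.log(reloaded.sphinx.port)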
@@ -12,7 +12,7 @@ const getTorrent = require('./gettorrent')
 const startSphinx = require('./sphinx')


-const currentVersion = 5
+const currentVersion = 6


 module.exports = async (callback, mainWindow, sphinxApp) => {
@@ -104,6 +104,112 @@ module.exports = async (callback, mainWindow, sphinxApp) => {

 const patch = async (version) => {
 logT('patcher', 'db version', version)
+
+const rebuildTorrentsFull = async () => {
+
+if(sphinxApp.isExternal)
+{
+logTE('patcher', 'this patch avaiable only not on external db')
+throw new Error('this patch avaiable only not on external db')
+}
+
+let i = 1
+const torrents = (await sphinx.query("SELECT COUNT(*) AS c FROM torrents"))[0].c
+
+let torrentsArray = []
+
+let patch = 1
+await forBigTable(sphinx, 'torrents', async (torrent) => {
+logT('patcher', 'remember index', torrent.id, torrent.name, '[', i, 'of', torrents, ']')
+if(patchWindow)
+patchWindow.webContents.send('reindex', {field: torrent.name, index: i++, all: torrents, torrent: true})
+
+torrentsArray.push(torrent)
+// keep memory safe
+if(torrentsArray.length >= 20000)
+{
+fs.writeFileSync(`${sphinxApp.directoryPath}/torrents.patch.${patch++}`, JSON.stringify(torrentsArray, null, 4), 'utf8');
+logT('patcher', 'write torrents dump', `${sphinxApp.directoryPath}/torrents.patch.${patch - 1}`)
+torrentsArray = []
+}
+})
+// keep last elemets
+if(torrentsArray.length > 0)
+{
+fs.writeFileSync(`${sphinxApp.directoryPath}/torrents.patch.${patch}`, JSON.stringify(torrentsArray, null, 4), 'utf8');
+logT('patcher', 'write torrents dump', `${sphinxApp.directoryPath}/torrents.patch.${patch}`)
+torrentsArray = []
+}
+else
+{
+patch-- //no last patch
+}
+
+// stop sphinx
+await new Promise((resolve) => {
+// reopen sphinx
+sphinx.destroy() // destory connection
+sphinxApp.stop(resolve, true)
+})
+
+logT('patcher', 'sphinx stoped for patching')
+
+await new Promise((resolve) => {
+glob(`${sphinxApp.directoryPathDb}/torrents.*`, function (er, files) {
+files.forEach(file => {
+logT('patcher', 'clear torrents file', file)
+fs.unlinkSync(path.resolve(file))
+})
+resolve()
+})
+})
+
+logT('patcher', 'cleaned torrents db structure, rectreating again')
+i = 1
+await new Promise(async (resolve) => {
+// reopen sphinx
+sphinxApp = await sphinxApp.start(async () => {
+sphinx = await single().waitConnection()
+resolve()
+}) // same args
+})
+
+logT('patcher', 'sphinx restarted, patch db now')
+
+for(let k = 1; k <= patch; k++)
+{
+torrentsArray = JSON.parse(fs.readFileSync(`${sphinxApp.directoryPath}/torrents.patch.${k}`, 'utf8'))
+logT('patcher', 'read torrents dump', `${sphinxApp.directoryPath}/torrents.patch.${k}`)
+await asyncForEach(torrentsArray, async (torrent) => {
+logT('patcher', 'update index', torrent.id, torrent.name, '[', i, 'of', torrents, ']')
+if(patchWindow)
+patchWindow.webContents.send('reindex', {field: torrent.name, index: i++, all: torrents, torrent: true})
+
+torrent.nameIndex = torrent.name
+await sphinx.query(`DELETE FROM torrents WHERE id = ${torrent.id}`)
+await sphinx.insertValues('torrents', torrent)
+})
+}
+
+await new Promise((resolve) => {
+glob(`${sphinxApp.directoryPath}/torrents.patch.*`, function (er, files) {
+files.forEach(file => {
+logT('patcher', 'clear dump file', file)
+fs.unlinkSync(path.resolve(file))
+})
+resolve()
+})
+})
+
+torrentsArray = null
+
+logT('patcher', 'optimizing torrents')
+if(patchWindow)
+patchWindow.webContents.send('optimize', {field: 'torrents'})
+sphinx.query(`OPTIMIZE INDEX torrents`)
+await sphinxApp.waitOptimized('torrents')
+}
+
 switch(version)
 {
 case 1:
@@ -192,105 +298,15 @@ module.exports = async (callback, mainWindow, sphinxApp) => {
 case 4:
 {
 openPatchWindow()
-
-let i = 1
-const torrents = (await sphinx.query("SELECT COUNT(*) AS c FROM torrents"))[0].c
-
-let torrentsArray = []
-
-let patch = 1
-await forBigTable(sphinx, 'torrents', async (torrent) => {
-logT('patcher', 'remember index', torrent.id, torrent.name, '[', i, 'of', torrents, ']')
-if(patchWindow)
-patchWindow.webContents.send('reindex', {field: torrent.name, index: i++, all: torrents, torrent: true})
-
-torrentsArray.push(torrent)
-// keep memory safe
-if(torrentsArray.length >= 20000)
-{
-fs.writeFileSync(`${sphinxApp.directoryPath}/torrents.patch.${patch++}`, JSON.stringify(torrentsArray, null, 4), 'utf8');
-logT('patcher', 'write torrents dump', `${sphinxApp.directoryPath}/torrents.patch.${patch-1}`)
-torrentsArray = []
-}
-})
-// keep last elemets
-if(torrentsArray.length > 0)
-{
-fs.writeFileSync(`${sphinxApp.directoryPath}/torrents.patch.${patch}`, JSON.stringify(torrentsArray, null, 4), 'utf8');
-logT('patcher', 'write torrents dump', `${sphinxApp.directoryPath}/torrents.patch.${patch}`)
-torrentsArray = []
-}
-else
-{
-patch-- //no last patch
-}
-
-// stop sphinx
-await new Promise((resolve) => {
-// reopen sphinx
-sphinx.destroy() // destory connection
-sphinxApp.stop(resolve, true)
-})
-
-logT('patcher', 'sphinx stoped for patching')
-
-await new Promise((resolve) => {
-glob(`${sphinxApp.directoryPathDb}/torrents.*`, function (er, files) {
-files.forEach(file => {
-logT('patcher', 'clear torrents file', file)
-fs.unlinkSync(path.resolve(file))
-})
-resolve()
-})
-})
-
-logT('patcher', 'cleaned torrents db structure, rectreating again')
-i = 1
-await new Promise((resolve) => {
-// reopen sphinx
-sphinxApp = sphinxApp.start(async () => {
-sphinx = await single().waitConnection()
-resolve()
-}) // same args
-})
-
-logT('patcher', 'sphinx restarted, patch db now')
-
-for(let k = 1; k <= patch; k++)
-{
-torrentsArray = JSON.parse(fs.readFileSync(`${sphinxApp.directoryPath}/torrents.patch.${k}`, 'utf8'))
-logT('patcher', 'read torrents dump', `${sphinxApp.directoryPath}/torrents.patch.${k}`)
-await asyncForEach(torrentsArray, async (torrent) => {
-logT('patcher', 'update index', torrent.id, torrent.name, '[', i, 'of', torrents, ']')
-if(patchWindow)
-patchWindow.webContents.send('reindex', {field: torrent.name, index: i++, all: torrents, torrent: true})
-
-torrent.nameIndex = torrent.name
-await sphinx.query(`DELETE FROM torrents WHERE id = ${torrent.id}`)
-await sphinx.insertValues('torrents', torrent)
-})
-}
-
-await new Promise((resolve) => {
-glob(`${sphinxApp.directoryPath}/torrents.patch.*`, function (er, files) {
-files.forEach(file => {
-logT('patcher', 'clear dump file', file)
-fs.unlinkSync(path.resolve(file))
-})
-resolve()
-})
-})
-
-torrentsArray = null
-
-logT('patcher', 'optimizing torrents')
-if(patchWindow)
-patchWindow.webContents.send('optimize', {field: 'torrents'})
-sphinx.query(`OPTIMIZE INDEX torrents`)
-await sphinxApp.waitOptimized('torrents')
-
+await rebuildTorrentsFull()
 await setVersion(5)
 }
+case 5:
+{
+openPatchWindow()
+await rebuildTorrentsFull()
+await setVersion(6)
+}
 }
 logT('patcher', 'db patch done')
 sphinx.destroy()
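Note: the case 4 patch body is extracted into rebuildTorrentsFull so that case 5 can reuse it. The helper streams the whole torrents index into numbered JSON dump files in 20000-row chunks, stops searchd, deletes the index files, restarts searchd, and re-inserts every row with nameIndex populated. A minimal sketch of the chunked-dump half, assuming a generic forEachRow iterator in place of the project's forBigTable:

    const fs = require('fs')

    // forEachRow is a hypothetical stand-in: calls cb(row) for every row of table
    async function dumpTable(forEachRow, table, dir) {
        const CHUNK = 20000
        let rows = []
        let part = 0
        await forEachRow(table, (row) => {
            rows.push(row)
            // flush to disk so memory stays bounded on multi-million row tables
            if (rows.length >= CHUNK) {
                fs.writeFileSync(`${dir}/${table}.patch.${++part}`, JSON.stringify(rows))
                rows = []
            }
        })
        if (rows.length > 0) // flush the tail chunk
            fs.writeFileSync(`${dir}/${table}.patch.${++part}`, JSON.stringify(rows))
        return part // number of dump files written
    }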
src/background/directoryFilesRecursive.js (new file, 24 lines)
@@ -0,0 +1,24 @@
+const fs = require('fs')
+
+function directoryFilesRecursive (directory, filesList = []) {
+let files;
+try {
+files = fs.readdirSync(directory)
+} catch(err) {
+if(err.code !== 'ENOTDIR')
+throw err
+else
+return [directory] // if file, return file
+}
+for (const file of files) {
+const filePath = `${directory}/${file}`
+if (fs.statSync(filePath).isDirectory()) {
+directoryFilesRecursive(filePath, filesList)
+} else {
+filesList.push(filePath)
+}
+}
+return filesList
+}
+
+module.exports = directoryFilesRecursive
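Note: directoryFilesRecursive flattens a directory tree into an array of file paths; passing a plain file returns a one-element array via the ENOTDIR branch, which is what lets dropTorrents below accept files and folders alike. A minimal usage sketch:

    const directoryFilesRecursive = require('./src/background/directoryFilesRecursive')

    // every file under ./torrents, however deeply nested (hypothetical folder)
    const files = directoryFilesRecursive('./torrents')
    console.log(files.length, 'files found')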
src/background/portCheck.js (new file, 11 lines)
@@ -0,0 +1,11 @@
+const net = require('net')
+
+module.exports = (port, host = '127.0.0.1') => new Promise((resolve, reject) => {
+const tester = net.createServer()
+.once('error', err => (err.code === 'EADDRINUSE' ? resolve(false) : reject(err)))
+.once('listening', () => tester.once('close', () => resolve(true)).close())
+.listen({
+host,
+port
+})
+})
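Note: portCheck resolves true when the port can be bound on the given host and false on EADDRINUSE (any other error rejects); sphinx.js below builds findGoodPort on top of it. A minimal usage sketch:

    const portCheck = require('./src/background/portCheck')

    ;(async () => {
        // walk upward from 9306 until a bindable port is found
        let port = 9306
        while (!(await portCheck(port)))
            port++
        console.log('first free port:', port)
    })()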
@@ -68,14 +68,17 @@ io.on('connection', (socket) =>
 }
 })

-sphinx = startSphinx(() => {
+const start = async () =>
+{
+sphinx = await startSphinx(() => {
 dbPatcher(() => {
 spider = new spiderCall((...data) => io.sockets.emit(...data), (message, callback) => {
 socketMessages[message] = callback
 }, path.resolve(packageJson.serverDataDirectory), packageJson.version, 'production')
 }, null, sphinx)
 }, path.resolve(packageJson.serverDataDirectory), () => {})
+
+}
+start()

 var rl = require("readline").createInterface({
 input: process.stdin,
@@ -92,8 +95,12 @@ process.on("SIGINT", () => {
 {
 spider.stop(() => sphinx.stop(() => process.exit()))
 }
-else
+else if(sphinx)
 {
 sphinx.stop(() => process.exit())
 }
+else
+{
+process.exit()
+}
 });
@@ -10,8 +10,22 @@ const { spawn, exec } = require('child_process')
 const appConfig = require('./config')
 const findFiles = require('./findFiles')
 const _ = require('lodash')
+const isRunning = require('is-running')
+const portCheck = require('./portCheck')

+const findGoodPort = async (port, host) => {
+while (!(await portCheck(port, host))) {
+port++
+logT('sphinx', 'port is busy, listen on', port)
+}
+return port
+}
+
+const writeSphinxConfig = async (path, dbPath) => {
+appConfig.sphinx.port = await findGoodPort(appConfig.sphinx.port)
+appConfig.sphinx.interfacePort = await findGoodPort(appConfig.sphinx.interfacePort)
+appConfig.sphinx = appConfig.sphinx
+
-const writeSphinxConfig = (path, dbPath) => {
 let config = `
 index torrents
 {
@@ -38,6 +52,9 @@ const writeSphinxConfig = (path, dbPath) => {
 rt_attr_timestamp = trackersChecked
 rt_attr_uint = good
 rt_attr_uint = bad
+
+ngram_len = 1
+ngram_chars = U+3000..U+2FA1F
 }

 index files
@@ -82,8 +99,8 @@ const writeSphinxConfig = (path, dbPath) => {

 searchd
 {
-listen = 9312
-listen = 9306:mysql41
+listen = 127.0.0.1:${appConfig.sphinx.interfacePort}
+listen = 127.0.0.1:${appConfig.sphinx.port}:mysql41
 read_timeout = 5
 max_children = 30
 seamless_rotate = 1
@@ -142,8 +159,8 @@ const writeSphinxConfig = (path, dbPath) => {
 return {isInitDb}
 }

-module.exports = (callback, dataDirectory, onClose) => {
-const start = (callback) => {
+module.exports = async (callback, dataDirectory, onClose) => {
+const start = async (callback) => {

 const sphinxPath = path.resolve(appPath('searchd'))
 logT('sphinx', 'Sphinx Path:', sphinxPath)
@@ -156,7 +173,13 @@ module.exports = (callback, dataDirectory, onClose) => {
 appConfig['dbPath'] = sphinxConfigDirectory
 }

-const { isInitDb } = writeSphinxConfig(sphinxConfigDirectory, appConfig.dbPath)
+// check external sphinx instance for using
+const sphinxPid = `${sphinxConfigDirectory}/searchd.pid`
+const isSphinxExternal = fs.existsSync(sphinxPid) && isRunning(parseInt(fs.readFileSync(sphinxPid)))
+if(isSphinxExternal)
+logT('sphinx', `founded running sphinx instance in ${sphinxPid}, using it`)
+
+const { isInitDb } = isSphinxExternal ? {isInitDb: false} : await writeSphinxConfig(sphinxConfigDirectory, appConfig.dbPath)

 const config = `${sphinxConfigDirectory}/sphinx.conf`
 const options = ['--config', config]
@@ -164,7 +187,10 @@ module.exports = (callback, dataDirectory, onClose) => {
 {
 options.push('--nodetach')
 }
-const sphinx = spawn(sphinxPath, options)
+
+const sphinx = !isSphinxExternal ? spawn(sphinxPath, options) :
+{isExternal: true, on: (d,f) => {}, stdout: {on : (d,f)=>{} }}; // running stub
+
 // remeber initizalizing of db
 sphinx.start = start
 sphinx.isInitDb = isInitDb
@@ -202,12 +228,16 @@ module.exports = (callback, dataDirectory, onClose) => {
 }
 })

-sphinx.on('close', (code, signal) => {
-logT('sphinx', `sphinx closed with code ${code} and signal ${signal}`)
+const close = () => {
 if(onClose && !sphinx.replaceOnClose) // sometime we don't want to call default callback
 onClose()
 if(sphinx.onClose)
 sphinx.onClose()
 }
+
+sphinx.on('close', (code, signal) => {
+logT('sphinx', `sphinx closed with code ${code} and signal ${signal}`)
+close()
+})

 sphinx.stop = (onFinish, replaceFinish) => {
@@ -216,7 +246,14 @@ module.exports = (callback, dataDirectory, onClose) => {
 sphinx.onClose = onFinish
 if(replaceFinish)
 sphinx.replaceOnClose = true // sometime we don't want to call default callback
+
+if (!sphinx.isExternal)
 exec(`"${sphinxPath}" --config "${config}" --stopwait`)
+else
+{
+logT('sphinx', `ignoring sphinx closing because external sphinx instance`)
+close()
+}
 }

 sphinx.waitOptimized = (table) => new Promise((resolve) => {
@@ -227,6 +264,9 @@ module.exports = (callback, dataDirectory, onClose) => {
 })

 sphinx.fixDatabase = async () => {
+if(sphinx.isExternal)
+return
+
 if(sphinx.fixing)
 return
 sphinx.fixing = true
@@ -264,12 +304,13 @@ module.exports = (callback, dataDirectory, onClose) => {

 sphinx.fixing = false

-_.merge(sphinx, sphinx.start(callback));
+_.merge(sphinx, await sphinx.start(callback));
 }

+if (isSphinxExternal && callback) setTimeout(()=>{logT('sphinx', 'external sphinx signalled');callback()}, 0);

 return sphinx

 }

-return start(callback)
+return await start(callback)
 }
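Note: the external-instance detection reads searchd.pid and asks is-running whether that pid is still alive; if so, the app attaches to the already-running searchd through a stub object instead of spawning its own, and stop() logs and short-circuits rather than shutting the external daemon down. A minimal sketch of the same check, with a hypothetical pidfile path:

    const fs = require('fs')
    const isRunning = require('is-running')

    // the real code uses `${sphinxConfigDirectory}/searchd.pid`
    const pidFile = '/tmp/searchd.pid'
    const external = fs.existsSync(pidFile) && isRunning(parseInt(fs.readFileSync(pidFile)))
    console.log(external ? 'attach to running searchd' : 'spawn a new searchd')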
@@ -31,6 +31,9 @@ const checkInternet = require('./checkInternet')
 const {torrentTypeDetect} = require('../app/content');

 const torrentClient = require('./torrentClient')
+const directoryFilesRecursive = require('./directoryFilesRecursive')
+const _ = require('lodash')
+const mime = require('mime');

 // Start server
 //server.listen(config.httpPort);
@@ -495,10 +498,10 @@ module.exports = function (send, recive, dataDirectory, version, env)
 }

 const insertMetadata = (metadata, infohash, rinfo) => {
-logT('spider', 'finded torrent', metadata.info.name, ' and add to database');
+const bufferToString = (buffer) => Buffer.isBuffer(buffer) ? buffer.toString() : buffer
+
+logT('spider', 'finded torrent', bufferToString(metadata.info.name), 'and add to database');

 const hash = infohash.toString('hex');
 let size = metadata.info.length ? metadata.info.length : 0;
 let filesCount = 1;
@@ -595,9 +598,25 @@ module.exports = function (send, recive, dataDirectory, version, env)
 }

 recive('dropTorrents', (pathTorrents) => {
-logT('drop', 'drop torrents and replicate from original')
-const torrents = pathTorrents.map(path => parseTorrent(fs.readFileSync(path)))
-torrents.forEach(torrent => insertMetadata(torrent, torrent.infoHashBuffer, {address: '127.0.0.1', port: 666}))
+logT('drop', 'drop torrents and replicate from original torrent files')
+const torrents = _.flatten(pathTorrents.map(path => directoryFilesRecursive(path)))
+.filter(path => mime.getType(path) == 'application/x-bittorrent')
+.map(path => {
+try {
+return ({
+torrent: parseTorrent(fs.readFileSync(path)),
+path
+})
+} catch(err) {
+logT('drop', 'error on parse torrent:', path)
+}
+})
+.filter(torrent => torrent)
+torrents.forEach(({torrent, path}) => {
+insertMetadata(torrent, torrent.infoHashBuffer, {address: '127.0.0.1', port: 666})
+logT('drop', 'copied torrent to db:', path)
+})
+logT('drop', 'torrent finish adding to db')
 })

 checkInternet((connected) => {
@@ -814,12 +833,15 @@ module.exports = function (send, recive, dataDirectory, version, env)
 }))
 })

+if(!this.preventNetworkOnExit)
+{
 await Promise.all([
 saveBootstrapPeers('api.myjson.com', '/bins/1e5rmh'),
 saveBootstrapPeers('jsonblob.com', '/api/jsonBlob/013a4415-3533-11e8-8290-a901f3cf34aa')
 ])
+}
 }
 }

 logT('close', 'closing p2p...')
 // don't listen spider peer appears
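Note: dropTorrents now accepts directories as well as individual files: each dropped path is expanded with directoryFilesRecursive, filtered by MIME type, and torrents that fail to parse are logged and skipped instead of aborting the whole batch. A minimal sketch of the same filter, assuming a local torrents/ folder:

    const mime = require('mime')
    const directoryFilesRecursive = require('./src/background/directoryFilesRecursive')

    // keep only files whose extension maps to the bittorrent MIME type
    const torrentFiles = directoryFilesRecursive('./torrents')
        .filter(file => mime.getType(file) == 'application/x-bittorrent')
    console.log('droppable torrent files:', torrentFiles)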
@@ -8,7 +8,7 @@ const startApplication = function() {
 args: ["."],
 startTimeout: 30000,
 waitTimeout: 30000,
-quitTimeout: 10000
+quitTimeout: 15000
 });
 return this.app.start();
 };
@@ -9,6 +9,8 @@ describe("application", () => {
 it("check start", async function() {
 const { app } = this
 await app.client.waitForExist('#index-window')
+// fix realtime config
+require('../src/background/config').reload(await app.electron.remote.app.getPath('userData'))
 });

 //TESTS
tests/seach.test.js (new file, 44 lines)
@@ -0,0 +1,44 @@
+import { assert } from "chai";
+const asyncWait = require('../src/background/asyncWait')
+
+describe("search", function() {
+this.timeout(30000);
+
+it("dht seach", async function() {
+this.timeout(45000);
+const { app } = this
+await app.client.waitForExist('#searchInput')
+await app.client.$('#searchInput').setValue('1413ba1915affdc3de7e1a81d6fdc32ef19395c9')
+await app.client.click('#search')
+await app.client.waitForExist('.torrentRow .torrentName')
+const value = await app.client.$('.torrentRow .torrentName').getText()
+assert.equal(value, 'Roblox_setup.exe')
+})
+
+it("sphinx search", async function() {
+const { app } = this
+await app.client.$('#searchInput').setValue('Roblox_setup')
+await app.client.click('#search')
+await app.client.waitForExist('.torrentRow .torrentName')
+const results = (await app.client.$$('.torrentRow .torrentName')).length
+assert(results >= 1)
+})
+
+it("sphinx particial search", async function() {
+const { app } = this
+await app.client.$('#searchInput').setValue('Roblo')
+await app.client.click('#search')
+await app.client.waitForExist('.torrentRow .torrentName')
+const results = (await app.client.$$('.torrentRow .torrentName')).length
+assert(results >= 1)
+})
+
+it("magnet search", async function() {
+const { app } = this
+await app.client.$('#searchInput').setValue('magnet:?xt=urn:btih:1413ba1915affdc3de7e1a81d6fdc32ef19395c9')
+await app.client.click('#search')
+await app.client.waitForExist('.torrentRow .torrentName')
+const results = (await app.client.$$('.torrentRow .torrentName')).length
+assert(results == 1)
+})
+});
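Note: these Spectron tests drive the UI through the hooks added earlier in this commit: id='searchInput' and id='search' in the Search component and the torrentRow/torrentName classNames in the Torrent row. The asyncWait import appears unused in this version of the file; it is presumably available for explicit pauses between UI steps if the waitForExist polling ever proves insufficient.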