feat(server): separate server part and add the ability to run the server without Electron
parent fac6736710
commit e244fabd2c
@@ -104,7 +104,8 @@
 "test": "mocha temp/e2e.js --require @babel/core/lib --require source-map-support/register",
 "start": "node build/start.js",
 "prebuild": "webpack --config=build/webpack.app.config.js --env=production",
-"build": "electron-builder"
+"build": "electron-builder",
+"server": "node src/background/server.js"
 },
 "dependencies": {
 "bencode": "^1.0.0",
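Note: the new "server" script is what makes the backend usable without Electron — npm run server simply runs node src/background/server.js, the headless entry point added later in this commit.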
@@ -1,5 +1,5 @@
 const ipaddr = require('ipaddr.js');
-import forBigTable from './forBigTable'
+const forBigTable = require('./forBigTable')

 module.exports = ({
 sphinx,
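Note: this is the first of many mechanical conversions in this commit. Modules shared with the new headless server drop ES-module import/export syntax (which depends on the webpack/Babel build) in favour of plain CommonJS require/module.exports, presumably so a stock Node process can load them directly; the same pattern repeats in the config, dbPatcher, electronAppPath, forBigTable, shuffle and ssh hunks below. A generic sketch of the pattern (hypothetical module name, not from the source):

// before — needs the Babel/webpack build:
//   import helper from './helper'
//   export default (arg) => helper(arg)
// after — loadable by plain node:
const helper = require('./helper')
module.exports = (arg) => helper(arg)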
@@ -9,7 +9,6 @@ import os from 'os';
 import { app, Menu, ipcMain, Tray, dialog } from "electron";
 import createWindow from "./helpers/window";
 import { autoUpdater } from 'electron-updater'
-import appPath from './electronAppPath'

 import { devMenuTemplate } from "./menu/dev_menu_template";
 import { editMenuTemplate } from "./menu/edit_menu_template";
@@ -20,10 +19,7 @@ import { manageMenuTemplate } from "./menu/manage_menu_template";
 // Special module holding environment variables which you declared
 // in config/env_xxx.json file.
 import env from "env";
-
-const { spawn, exec } = require('child_process')
-const fs = require('fs')
-const iconv = require('iconv-lite');
+import fs from 'fs';

 // plugins and dev tool
 require('electron-context-menu')({})
@@ -59,9 +55,10 @@ if(env.name === "production") {

 const resourcesPath = env.name === "production" ? process.resourcesPath : 'resources'

-const spiderCall = require('./spider')
 const appConfig = require('./config')
+const spiderCall = require('./spider')
 const dbPatcher = require('./dbPatcher')
+const startSphinx = require('./sphinx')

 let mainWindow = undefined
 let sphinx = undefined
@@ -108,173 +105,6 @@ if (shouldQuit) {
 app.exit(0);
 }

-const writeSphinxConfig = (path, dbPath) => {
-let config = `
-index torrents
-{
-type = rt
-path = ${dbPath}/database/torrents
-
-rt_attr_string = hash
-rt_attr_string = name
-rt_field = nameIndex
-rt_attr_bigint = size
-rt_attr_uint = files
-rt_attr_uint = piecelength
-rt_attr_timestamp = added
-rt_attr_string = ipv4
-rt_attr_uint = port
-rt_attr_string = contentType
-rt_attr_string = contentCategory
-rt_attr_uint = seeders
-rt_attr_uint = leechers
-rt_attr_uint = completed
-rt_attr_timestamp = trackersChecked
-rt_attr_uint = good
-rt_attr_uint = bad
-}
-
-index files
-{
-type = rt
-path = ${dbPath}/database/files
-
-rt_attr_string = path
-rt_field = pathIndex
-rt_attr_string = hash
-rt_attr_bigint = size
-}
-
-index version
-{
-type = rt
-path = ${dbPath}/database/version
-
-rt_attr_uint = version
-rt_field = versionIndex
-}
-
-searchd
-{
-listen = 9312
-listen = 9306:mysql41
-read_timeout = 5
-max_children = 30
-seamless_rotate = 1
-preopen_indexes = 1
-unlink_old = 1
-workers = threads # for RT to work
-pid_file = ${path}/searchd.pid
-log = ${path}/searchd.log
-query_log = ${path}/query.log
-binlog_path = ${path}
-}
-`;
-
-// clear dir in test env
-if(env.name === 'test')
-{
-if (fs.existsSync(`${dbPath}/database`)) {
-fs.readdirSync(`${dbPath}/database`).forEach(function(file, index){
-const curPath = `${dbPath}/database` + "/" + file;
-if (!fs.lstatSync(curPath).isDirectory()) {
-fs.unlinkSync(curPath);
-}
-});
-
-fs.readdirSync(path).forEach(function(file, index){
-if(!file.startsWith('binlog'))
-return;
-const curPath = path + "/" + file;
-if (!fs.lstatSync(curPath).isDirectory()) {
-fs.unlinkSync(curPath);
-}
-});
-}
-}
-
-let isInitDb = false
-
-if (!fs.existsSync(`${dbPath}/database`)){
-fs.mkdirSync(`${dbPath}/database`);
-isInitDb = true
-}
-
-if(/^win/.test(process.platform))
-config = iconv.encode(config, 'win1251')
-
-fs.writeFileSync(`${path}/sphinx.conf`, config)
-console.log(`writed sphinx config to ${path}`)
-console.log('db path:', dbPath)
-
-return {isInitDb}
-}
-
-const sphinxPath = path.resolve(appPath('searchd'))
-console.log('Sphinx Path:', sphinxPath)
-
-const startSphinx = (callback) => {
-const sphinxConfigDirectory = app.getPath("userData")
-appConfig['dbPath'] = appConfig.dbPath && appConfig.dbPath.length > 0 ? appConfig.dbPath : sphinxConfigDirectory;
-// on portable dir can move database directory
-if(!fs.existsSync(appConfig.dbPath) && fs.existsSync(sphinxConfigDirectory))
-{
-appConfig['dbPath'] = sphinxConfigDirectory
-}
-
-const { isInitDb } = writeSphinxConfig(sphinxConfigDirectory, appConfig.dbPath)
-
-const config = `${sphinxConfigDirectory}/sphinx.conf`
-const options = ['--config', config]
-if(!(/^win/.test(process.platform)))
-{
-options.push('--nodetach')
-}
-sphinx = spawn(sphinxPath, options)
-// remeber initizalizing of db
-sphinx.isInitDb = isInitDb
-sphinx.directoryPath = appConfig.dbPath
-sphinx.directoryPathDb = appConfig.dbPath + '/database'
-
-const optimizeResolvers = {}
-
-sphinx.stdout.on('data', (data) => {
-console.log(`sphinx: ${data}`)
-if (data.includes('accepting connections')) {
-console.log('catched sphinx start')
-if(callback)
-callback()
-}
-
-const checkOptimized = String(data).match(/index ([\w]+): optimized/)
-if(checkOptimized)
-{
-if(optimizeResolvers[checkOptimized[1]])
-{
-console.log('resolve optimizer', checkOptimized[1])
-optimizeResolvers[checkOptimized[1]]()
-}
-}
-})
-
-sphinx.on('close', (code, signal) => {
-console.log(`sphinx closed with code ${code} and signal ${signal}`)
-app.quit()
-})
-
-sphinx.stop = () => {
-console.log('sphinx closing...')
-exec(`"${sphinxPath}" --config "${config}" --stopwait`)
-}
-
-sphinx.waitOptimized = (table) => new Promise((resolve) => {
-optimizeResolvers[table] = () => {
-delete optimizeResolvers[table];
-resolve()
-}
-})
-}
-
 // log autoupdate
 const log = require('electron-log')
 log.transports.file.level = false;
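Note: nothing here is lost — the writeSphinxConfig helper and the startSphinx launcher removed above reappear almost verbatim in the new src/background/sphinx.js file below. The only real changes are the Electron touch points: app.getPath("userData") becomes a dataDirectory parameter, the app.quit() call on searchd exit becomes an onClose callback, and the webpack-provided "env" module is required inside a try/catch so the file also loads outside the Electron build.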
@@ -306,7 +136,7 @@ autoUpdater.on('update-downloaded', () => {
 let tray = undefined

 app.on("ready", () => {
-startSphinx(() => {
+sphinx = startSphinx(() => {

 mainWindow = createWindow("main", {
 width: 1000,
@@ -394,7 +224,7 @@ app.on("ready", () => {
 })
 }, app.getPath("userData"), app.getVersion(), env.name)
 }, mainWindow, sphinx)
-})
+}, app.getPath("userData"), () => app.quit())
 });

 let stopProtect = false
@@ -1,4 +1,4 @@
-import { app } from 'electron'
+const { app } = require('electron')
 const os = require('os')

 let config = {
@@ -1,10 +1,10 @@
-import config from './config'
-import mysql from 'mysql'
-import forBigTable from './forBigTable'
-import { BrowserWindow } from "electron";
-import url from 'url'
-import path from 'path'
-import fs from 'fs'
+const config = require('./config')
+const mysql = require( 'mysql')
+const forBigTable = require('./forBigTable')
+const { BrowserWindow } = require("electron");
+const url = require('url')
+const path = require('path')
+const fs = require('fs')

 const currentVersion = 3

@@ -1,7 +1,7 @@
-import path from 'path'
+const path = require('path')
 const fs = require('fs')

-export default (app) => {
+module.exports = (app) => {
 if (fs.existsSync(`./${app}`)) {
 return `./${app}`
 }
@@ -1,4 +1,4 @@
-export default (sphinx, table, callback, doneCallback, max = 1000) => new Promise((done) => {
+module.exports = (sphinx, table, callback, doneCallback, max = 1000) => new Promise((done) => {
 const checker = (index = 0) => {
 sphinx.query(`SELECT * FROM ${table} WHERE id > ${index} LIMIT ${max}`, (err, torrents) => {
 if(err || torrents.length == 0)
@@ -1,5 +1,5 @@
-import ssh from './ssh'
-import shuffle from './shuffle'
+const ssh = require('./ssh')
+const shuffle = require('./shuffle')
 const config = require('./config');
 const net = require('net')
 const JsonSocket = require('json-socket')
@@ -7,18 +7,17 @@ const os = require('os');
 const isPortReachable = require('./isPortReachable')

 class p2p {
-peers = []
-ignoreAddresses = ['127.0.0.1']
-messageHandlers = {}
-externalPeers = []
-size = 0
-p2pStatus = 0
-version = '0'
-
-info = {}
-
 constructor(send = () => {})
 {
+this.peers = []
+this.ignoreAddresses = ['127.0.0.1']
+this.messageHandlers = {}
+this.externalPeers = []
+this.size = 0
+this.p2pStatus = 0
+this.version = '0'
+this.info = {}
+
 this.send = send
 this.tcpServer = net.createServer();
 this.tcpServer.maxConnections = config.p2pConnections * 2;
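Note: the p2p class-field declarations are folded into the constructor as this.* assignments. The assignments are behaviourally equivalent; the likely motivation is that class-field syntax still required transpilation at the time, and this keeps p2p.js loadable by plain Node when running the headless server.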
src/background/server.js (new file, 14 lines)
@@ -0,0 +1,14 @@
+const appConfig = require('./config')
+const spiderCall = require('./spider')
+const dbPatcher = require('./dbPatcher')
+const startSphinx = require('./sphinx')
+
+sphinx = startSphinx(() => {
+dbPatcher(() => {
+spider = spiderCall((...data) => {
+
+}, (message, callback) => {
+
+}, './', '0.7.1', 'development')
+}, null, sphinx)
+}, './', () => {})
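Note: the new entry point chains startSphinx → dbPatcher → spiderCall exactly as background.js does, but with the Electron-specific arguments replaced by stubs and hard-coded values, and it assigns sphinx and spider as implicit globals. A hedged reading of the positional arguments, inferred from the background.js call sites in this commit (the comments are interpretation, not part of the source):

const appConfig = require('./config')
const spiderCall = require('./spider')
const dbPatcher = require('./dbPatcher')
const startSphinx = require('./sphinx')

sphinx = startSphinx(() => {            // onReady: searchd accepted connections
    dbPatcher(() => {                   // database schema patched / up to date
        spider = spiderCall(
            (...data) => {},            // send-to-UI callback (no renderer here)
            (message, callback) => {},  // receive-from-UI callback (unused here)
            './', '0.7.1', 'development' // data directory, app version, env name
        )
    }, null, sphinx)                    // null: no BrowserWindow to report patch progress to
}, './', () => {})                      // data directory for sphinx.conf, onClose handler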
@@ -2,7 +2,7 @@
 * Shuffles array in place.
 * @param {Array} a items An array containing the items.
 */
-export default function shuffle(a) {
+module.exports = function shuffle(a) {
 let j, x, i;
 for (i = a.length - 1; i > 0; i--) {
 j = Math.floor(Math.random() * (i + 1));
src/background/sphinx.js (new file, 180 lines)
@@ -0,0 +1,180 @@
+const path = require("path");
+let env
+try{
+env = require("env");
+} catch(e){}
+const appPath = require('./electronAppPath')
+const fs = require('fs')
+const iconv = require('iconv-lite')
+const { spawn, exec } = require('child_process')
+const appConfig = require('./config')
+
+const writeSphinxConfig = (path, dbPath) => {
+let config = `
+index torrents
+{
+type = rt
+path = ${dbPath}/database/torrents
+
+rt_attr_string = hash
+rt_attr_string = name
+rt_field = nameIndex
+rt_attr_bigint = size
+rt_attr_uint = files
+rt_attr_uint = piecelength
+rt_attr_timestamp = added
+rt_attr_string = ipv4
+rt_attr_uint = port
+rt_attr_string = contentType
+rt_attr_string = contentCategory
+rt_attr_uint = seeders
+rt_attr_uint = leechers
+rt_attr_uint = completed
+rt_attr_timestamp = trackersChecked
+rt_attr_uint = good
+rt_attr_uint = bad
+}
+
+index files
+{
+type = rt
+path = ${dbPath}/database/files
+
+rt_attr_string = path
+rt_field = pathIndex
+rt_attr_string = hash
+rt_attr_bigint = size
+}
+
+index version
+{
+type = rt
+path = ${dbPath}/database/version
+
+rt_attr_uint = version
+rt_field = versionIndex
+}
+
+searchd
+{
+listen = 9312
+listen = 9306:mysql41
+read_timeout = 5
+max_children = 30
+seamless_rotate = 1
+preopen_indexes = 1
+unlink_old = 1
+workers = threads # for RT to work
+pid_file = ${path}/searchd.pid
+log = ${path}/searchd.log
+query_log = ${path}/query.log
+binlog_path = ${path}
+}
+`;
+
+// clear dir in test env
+if(env && env.name === 'test')
+{
+if (fs.existsSync(`${dbPath}/database`)) {
+fs.readdirSync(`${dbPath}/database`).forEach(function(file, index){
+const curPath = `${dbPath}/database` + "/" + file;
+if (!fs.lstatSync(curPath).isDirectory()) {
+fs.unlinkSync(curPath);
+}
+});
+
+fs.readdirSync(path).forEach(function(file, index){
+if(!file.startsWith('binlog'))
+return;
+const curPath = path + "/" + file;
+if (!fs.lstatSync(curPath).isDirectory()) {
+fs.unlinkSync(curPath);
+}
+});
+}
+}
+
+let isInitDb = false
+
+if (!fs.existsSync(`${dbPath}/database`)){
+fs.mkdirSync(`${dbPath}/database`);
+isInitDb = true
+}
+
+if(/^win/.test(process.platform))
+config = iconv.encode(config, 'win1251')
+
+fs.writeFileSync(`${path}/sphinx.conf`, config)
+console.log(`writed sphinx config to ${path}`)
+console.log('db path:', dbPath)
+
+return {isInitDb}
+}
+
+module.exports = (callback, dataDirectory, onClose) => {
+const sphinxPath = path.resolve(appPath('searchd'))
+console.log('Sphinx Path:', sphinxPath)
+
+const sphinxConfigDirectory = dataDirectory
+appConfig['dbPath'] = appConfig.dbPath && appConfig.dbPath.length > 0 ? appConfig.dbPath : sphinxConfigDirectory;
+// on portable dir can move database directory
+if(!fs.existsSync(appConfig.dbPath) && fs.existsSync(sphinxConfigDirectory))
+{
+appConfig['dbPath'] = sphinxConfigDirectory
+}
+
+const { isInitDb } = writeSphinxConfig(sphinxConfigDirectory, appConfig.dbPath)
+
+const config = `${sphinxConfigDirectory}/sphinx.conf`
+const options = ['--config', config]
+if(!(/^win/.test(process.platform)))
+{
+options.push('--nodetach')
+}
+const sphinx = spawn(sphinxPath, options)
+// remeber initizalizing of db
+sphinx.isInitDb = isInitDb
+sphinx.directoryPath = appConfig.dbPath
+sphinx.directoryPathDb = appConfig.dbPath + '/database'
+
+const optimizeResolvers = {}
+
+sphinx.stdout.on('data', (data) => {
+console.log(`sphinx: ${data}`)
+if (data.includes('accepting connections')) {
+console.log('catched sphinx start')
+if(callback)
+callback()
+}
+
+const checkOptimized = String(data).match(/index ([\w]+): optimized/)
+if(checkOptimized)
+{
+if(optimizeResolvers[checkOptimized[1]])
+{
+console.log('resolve optimizer', checkOptimized[1])
+optimizeResolvers[checkOptimized[1]]()
+}
+}
+})
+
+sphinx.on('close', (code, signal) => {
+console.log(`sphinx closed with code ${code} and signal ${signal}`)
+if(onClose)
+onClose()
+})
+
+sphinx.stop = () => {
+console.log('sphinx closing...')
+exec(`"${sphinxPath}" --config "${config}" --stopwait`)
+}
+
+sphinx.waitOptimized = (table) => new Promise((resolve) => {
+optimizeResolvers[table] = () => {
+delete optimizeResolvers[table];
+resolve()
+}
+})
+
+return sphinx
+}
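Note: the extracted module keeps the behaviour background.js previously had, but parameterises the three Electron touch points — the callback fired once searchd logs "accepting connections", the directory that holds sphinx.conf and the database, and an onClose handler for when the searchd process exits — and returns the spawned child process. A minimal usage sketch, mirroring the two call sites in this commit (Electron variant shown; server.js passes './' and a no-op instead):

const { app } = require('electron')   // only needed for the Electron variant
const startSphinx = require('./sphinx')

const sphinx = startSphinx(() => {
    // searchd printed "accepting connections"; ports 9312 and 9306 (mysql41) are live
    console.log('sphinx ready, fresh database:', sphinx.isInitDb)
}, app.getPath('userData'), () => app.quit())

// later:
// sphinx.stop()                 -> runs `searchd --config ... --stopwait`
// sphinx.waitOptimized('files') -> resolves when "index files: optimized" appears in the log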
@@ -1,4 +1,4 @@
-import appPath from './electronAppPath'
+const appPath = require('./electronAppPath')
 const { spawn } = require('child_process')

 function getRandomInt(min, max) {
@@ -78,4 +78,4 @@ const startSSH = (port, host, user, password, callback) => {
 return ssh
 }

-export default startSSH
+module.exports = startSSH