Merge branch 'master' into services

# Conflicts:
#	package-lock.json
This commit is contained in:
Alexey Kasyanchuk
2018-08-08 06:59:53 +03:00
28 changed files with 702 additions and 612 deletions

3
.gitignore vendored
View File

@ -7,6 +7,8 @@ binlog.*
*.pid
version.vrs
sphinx.conf
*.p2p
downloads.json
/dist
/temp
@ -17,4 +19,3 @@ sphinx.conf
/app/app.js
/app/background.js
/app/**/*.map

View File

@ -1,3 +1,57 @@
# [0.28.0](https://github.com/DEgITx/rats-search/compare/v0.27.0...v0.28.0) (2018-08-07)
### Bug Fixes
* **closing:** fix errors on closing ([e04548a](https://github.com/DEgITx/rats-search/commit/e04548a))
* **db:** under mac and linux using alternative pool mechanism ([a3644c0](https://github.com/DEgITx/rats-search/commit/a3644c0))
* **log:** color log (part 2) ([ea8d93e](https://github.com/DEgITx/rats-search/commit/ea8d93e))
* **log:** color log (part 3) ([bc23379](https://github.com/DEgITx/rats-search/commit/bc23379))
* **log:** color messages (part 1) ([27b224d](https://github.com/DEgITx/rats-search/commit/27b224d))
* **server:** color log server fix ([17b377c](https://github.com/DEgITx/rats-search/commit/17b377c))
### Features
* **log:** color log ([62bbc46](https://github.com/DEgITx/rats-search/commit/62bbc46))
* **log:** error display with separate color [#31](https://github.com/DEgITx/rats-search/issues/31) ([70dd4a3](https://github.com/DEgITx/rats-search/commit/70dd4a3))
### Performance Improvements
* **replication:** replicate number according to cpu usage ([6af3b7a](https://github.com/DEgITx/rats-search/commit/6af3b7a))
* **torrents:** ability to disable integrity check on torrents adding torrents [#47](https://github.com/DEgITx/rats-search/issues/47) ([080fc92](https://github.com/DEgITx/rats-search/commit/080fc92))
# [0.27.0](https://github.com/DEgITx/rats-search/compare/v0.26.2...v0.27.0) (2018-08-06)
### Bug Fixes
* **background:** one closing pattern ([63158dc](https://github.com/DEgITx/rats-search/commit/63158dc))
* **closing:** window can be closing on event ([84e9573](https://github.com/DEgITx/rats-search/commit/84e9573))
* **gui:** top tabs text overlap ([45168a2](https://github.com/DEgITx/rats-search/commit/45168a2))
* **linux:** fix closing on linux ([75ad00a](https://github.com/DEgITx/rats-search/commit/75ad00a))
* **linux:** fix console control after exit ([29cd05a](https://github.com/DEgITx/rats-search/commit/29cd05a))
* **macos:** fix crashes under Mac OS X ([015447c](https://github.com/DEgITx/rats-search/commit/015447c))
* **macos:** stabilization with connection pool ([769521f](https://github.com/DEgITx/rats-search/commit/769521f))
* **scanner:** fix encoding names in some cases [#55](https://github.com/DEgITx/rats-search/issues/55) ([f1043eb](https://github.com/DEgITx/rats-search/commit/f1043eb))
* **server:** fix exit on server version [#54](https://github.com/DEgITx/rats-search/issues/54) [#52](https://github.com/DEgITx/rats-search/issues/52) ([4109ef9](https://github.com/DEgITx/rats-search/commit/4109ef9))
* **translations:** hash translation ([f5a6f17](https://github.com/DEgITx/rats-search/commit/f5a6f17))
### Features
* **cleaning:** fix cleaning checking and removing torrents (also display cleaning status in more details) [#52](https://github.com/DEgITx/rats-search/issues/52) ([7e0c565](https://github.com/DEgITx/rats-search/commit/7e0c565))
* **closing:** fast window closing/hiding ([019700e](https://github.com/DEgITx/rats-search/commit/019700e))
* **search:** add remote torrents in db via dht and search requests ([1e44164](https://github.com/DEgITx/rats-search/commit/1e44164))
* **search:** hash/magnet search support in db ([1e57789](https://github.com/DEgITx/rats-search/commit/1e57789))
* **torrents:** add support for dropping torrent to base just with window ([6d82291](https://github.com/DEgITx/rats-search/commit/6d82291))
### Performance Improvements
* **replication:** replication thread optimization ([c5427a6](https://github.com/DEgITx/rats-search/commit/c5427a6))
## [0.26.2](https://github.com/DEgITx/rats-search/compare/v0.26.1...v0.26.2) (2018-07-22)

231
package-lock.json generated
View File

@ -1,6 +1,6 @@
{
"name": "rats-search",
"version": "0.26.2",
"version": "0.27.0",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
@ -1684,9 +1684,10 @@
"dev": true
},
"@types/node": {
"version": "8.10.22",
"resolved": "https://registry.npmjs.org/@types/node/-/node-8.10.22.tgz",
"integrity": "sha512-HCJ1dUJEQVFRekwBAlyv9pJ+2rzxq9uimSmsK2q7YDYMbXR3b4BXcO9rsN+36ZBwSWQ5BNh5o8xdZijDSonS5A=="
"version": "8.10.23",
"resolved": "https://registry.npmjs.org/@types/node/-/node-8.10.23.tgz",
"integrity": "sha512-aEp5ZTLr4mYhR9S85cJ+sEYkcsgFY10N1Si5m49iTAVzanZXOwp/pgw6ibFLKXxpflqm71aSWZCRtnTXXO56gA==",
"dev": true
},
"@webassemblyjs/ast": {
"version": "1.4.3",
@ -1963,6 +1964,11 @@
"integrity": "sha1-SlKCrBZHKek2Gbz9OtFR+BfOkfU=",
"dev": true
},
"ansi-256-colors": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/ansi-256-colors/-/ansi-256-colors-1.1.0.tgz",
"integrity": "sha1-kQ3lDvzHwJ49gvL4er1rcAwYgYo="
},
"ansi-align": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/ansi-align/-/ansi-align-2.0.0.tgz",
@ -4229,7 +4235,8 @@
"boolbase": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz",
"integrity": "sha1-aN/1++YMUes3cl6p4+0xDcwed24="
"integrity": "sha1-aN/1++YMUes3cl6p4+0xDcwed24=",
"dev": true
},
"boom": {
"version": "4.3.1",
@ -4913,74 +4920,6 @@
"integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=",
"dev": true
},
"cheerio": {
"version": "1.0.0-rc.2",
"resolved": "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.2.tgz",
"integrity": "sha1-S59TqBsn5NXawxwP/Qz6A8xoMNs=",
"requires": {
"css-select": "~1.2.0",
"dom-serializer": "~0.1.0",
"entities": "~1.1.1",
"htmlparser2": "^3.9.1",
"lodash": "^4.15.0",
"parse5": "^3.0.1"
},
"dependencies": {
"domhandler": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/domhandler/-/domhandler-2.4.2.tgz",
"integrity": "sha512-JiK04h0Ht5u/80fdLMCEmV4zkNh2BcoMFBmZ/91WtYZ8qVXSKjiw7fXMgFPnHcSZgOo3XdinHvmnDUeMf5R4wA==",
"requires": {
"domelementtype": "1"
}
},
"htmlparser2": {
"version": "3.9.2",
"resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-3.9.2.tgz",
"integrity": "sha1-G9+HrMoPP55T+k/M6w9LTLsAszg=",
"requires": {
"domelementtype": "^1.3.0",
"domhandler": "^2.3.0",
"domutils": "^1.5.1",
"entities": "^1.1.1",
"inherits": "^2.0.1",
"readable-stream": "^2.0.2"
}
},
"isarray": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
"integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE="
},
"process-nextick-args": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.0.tgz",
"integrity": "sha512-MtEC1TqN0EU5nephaJ4rAtThHtC86dNN9qCuEhtshvpVBkAW5ZO7BASN9REnF9eoXGcRub+pFuKEpOHE+HbEMw=="
},
"readable-stream": {
"version": "2.3.6",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.6.tgz",
"integrity": "sha512-tQtKA9WIAhBF3+VLAseyMqZeBjW0AHJoxOtYqSUZNJxauErmLbVm2FW1y+J/YA9dUrAC39ITejlZWhVIwawkKw==",
"requires": {
"core-util-is": "~1.0.0",
"inherits": "~2.0.3",
"isarray": "~1.0.0",
"process-nextick-args": "~2.0.0",
"safe-buffer": "~5.1.1",
"string_decoder": "~1.1.1",
"util-deprecate": "~1.0.1"
}
},
"string_decoder": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz",
"integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==",
"requires": {
"safe-buffer": "~5.1.0"
}
}
}
},
"chokidar": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.0.3.tgz",
@ -6119,6 +6058,7 @@
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/css-select/-/css-select-1.2.0.tgz",
"integrity": "sha1-KzoRBTnFNV8c2NMUYj6HCxIeyFg=",
"dev": true,
"requires": {
"boolbase": "~1.0.0",
"css-what": "2.1",
@ -6180,7 +6120,8 @@
"css-what": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/css-what/-/css-what-2.1.0.tgz",
"integrity": "sha1-lGfQMsOM+u+58teVASUwYvh/ob0="
"integrity": "sha1-lGfQMsOM+u+58teVASUwYvh/ob0=",
"dev": true
},
"cssesc": {
"version": "0.1.0",
@ -6570,6 +6511,7 @@
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-0.1.0.tgz",
"integrity": "sha1-BzxpdUbOB4DOI75KKOKT5AvDDII=",
"dev": true,
"requires": {
"domelementtype": "~1.1.1",
"entities": "~1.1.1"
@ -6578,7 +6520,8 @@
"domelementtype": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-1.1.3.tgz",
"integrity": "sha1-vSh3PiZCiBrsUVRJJCmcXNgiGFs="
"integrity": "sha1-vSh3PiZCiBrsUVRJJCmcXNgiGFs=",
"dev": true
}
}
},
@ -6600,7 +6543,8 @@
"domelementtype": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-1.3.0.tgz",
"integrity": "sha1-sXrtguirWeUt2cGbF1bg/BhyBMI="
"integrity": "sha1-sXrtguirWeUt2cGbF1bg/BhyBMI=",
"dev": true
},
"domhandler": {
"version": "2.1.0",
@ -6615,6 +6559,7 @@
"version": "1.5.1",
"resolved": "https://registry.npmjs.org/domutils/-/domutils-1.5.1.tgz",
"integrity": "sha1-3NhIiib1Y9YQeeSMn3t+Mjc2gs8=",
"dev": true,
"requires": {
"dom-serializer": "0",
"domelementtype": "1"
@ -6725,9 +6670,9 @@
"dev": true
},
"electron": {
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/electron/-/electron-2.0.5.tgz",
"integrity": "sha512-NbWsgAvcxxQrDNaLA2L5adZTKWO6mZwC57uSPQiZiFjpO0K6uVNCjFyRbLnhq8AWq2tmcuzs6mFpIzQXmvlnUQ==",
"version": "2.0.6",
"resolved": "https://registry.npmjs.org/electron/-/electron-2.0.6.tgz",
"integrity": "sha512-1UHBWHF2EMjjVyTvcdcUBmISnoxElY4cUgkFVslw5pM1HxTVzi2vev+8NBohdLLFGbIbPyNua5vcBg+bxo1rqw==",
"dev": true,
"requires": {
"@types/node": "^8.0.24",
@ -7407,7 +7352,8 @@
"entities": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/entities/-/entities-1.1.1.tgz",
"integrity": "sha1-blwtClYhtdra7O+AuQ7ftc13cvA="
"integrity": "sha1-blwtClYhtdra7O+AuQ7ftc13cvA=",
"dev": true
},
"env-paths": {
"version": "1.0.0",
@ -9589,94 +9535,6 @@
"minimatch": "~3.0.2"
}
},
"google": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/google/-/google-2.1.0.tgz",
"integrity": "sha1-Edl8m1QvoqxMOlDLzZwUyC9qgQE=",
"requires": {
"cheerio": "^0.19.0",
"request": "^2.54.0"
},
"dependencies": {
"cheerio": {
"version": "0.19.0",
"resolved": "https://registry.npmjs.org/cheerio/-/cheerio-0.19.0.tgz",
"integrity": "sha1-dy5wFfLuKZZQltcepBdbdas1SSU=",
"requires": {
"css-select": "~1.0.0",
"dom-serializer": "~0.1.0",
"entities": "~1.1.1",
"htmlparser2": "~3.8.1",
"lodash": "^3.2.0"
}
},
"css-select": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/css-select/-/css-select-1.0.0.tgz",
"integrity": "sha1-sRIcpRhI3SZOIkTQWM7iVN7rRLA=",
"requires": {
"boolbase": "~1.0.0",
"css-what": "1.0",
"domutils": "1.4",
"nth-check": "~1.0.0"
}
},
"css-what": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/css-what/-/css-what-1.0.0.tgz",
"integrity": "sha1-18wt9FGAZm+Z0rFEYmOUaeAPc2w="
},
"domhandler": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/domhandler/-/domhandler-2.3.0.tgz",
"integrity": "sha1-LeWaCCLVAn+r/28DLCsloqir5zg=",
"requires": {
"domelementtype": "1"
}
},
"domutils": {
"version": "1.4.3",
"resolved": "https://registry.npmjs.org/domutils/-/domutils-1.4.3.tgz",
"integrity": "sha1-CGVRN5bGswYDGFDhdVFrr4C3Km8=",
"requires": {
"domelementtype": "1"
}
},
"htmlparser2": {
"version": "3.8.3",
"resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-3.8.3.tgz",
"integrity": "sha1-mWwosZFRaovoZQGn15dX5ccMEGg=",
"requires": {
"domelementtype": "1",
"domhandler": "2.3",
"domutils": "1.5",
"entities": "1.0",
"readable-stream": "1.1"
},
"dependencies": {
"domutils": {
"version": "1.5.1",
"resolved": "https://registry.npmjs.org/domutils/-/domutils-1.5.1.tgz",
"integrity": "sha1-3NhIiib1Y9YQeeSMn3t+Mjc2gs8=",
"requires": {
"dom-serializer": "0",
"domelementtype": "1"
}
},
"entities": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/entities/-/entities-1.0.0.tgz",
"integrity": "sha1-sph6o4ITR/zeZCsk/fyeT7cSvyY="
}
}
},
"lodash": {
"version": "3.10.1",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-3.10.1.tgz",
"integrity": "sha1-W/Rejkm6QYnhfUgnid/RW9FAt7Y="
}
}
},
"got": {
"version": "6.7.1",
"resolved": "https://registry.npmjs.org/got/-/got-6.7.1.tgz",
@ -10777,7 +10635,8 @@
"isarray": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz",
"integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8="
"integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=",
"dev": true
},
"isbinaryfile": {
"version": "3.0.2",
@ -10815,17 +10674,6 @@
"requires": {
"node-fetch": "^1.0.1",
"whatwg-fetch": ">=0.10.0"
},
"dependencies": {
"node-fetch": {
"version": "1.7.3",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-1.7.3.tgz",
"integrity": "sha512-NhZ4CsKx7cYm2vSrBAr2PvFOe6sWDf0UYLRqA6svUYg7+/TSfVAu49jYC4BvQ4Sms9SZgdqGBgroqfDhJdTyKQ==",
"requires": {
"encoding": "^0.1.11",
"is-stream": "^1.0.1"
}
}
}
},
"isstream": {
@ -12566,9 +12414,13 @@
"dev": true
},
"node-fetch": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.2.0.tgz",
"integrity": "sha512-OayFWziIxiHY8bCUyLX6sTpDH8Jsbp4FfYd1j1f7vZyfgkcOnAyM4oQR16f8a0s7Gl/viMGRey8eScYk4V4EZA=="
"version": "1.7.3",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-1.7.3.tgz",
"integrity": "sha512-NhZ4CsKx7cYm2vSrBAr2PvFOe6sWDf0UYLRqA6svUYg7+/TSfVAu49jYC4BvQ4Sms9SZgdqGBgroqfDhJdTyKQ==",
"requires": {
"encoding": "^0.1.11",
"is-stream": "^1.0.1"
}
},
"node-libs-browser": {
"version": "2.1.0",
@ -12750,6 +12602,7 @@
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/nth-check/-/nth-check-1.0.1.tgz",
"integrity": "sha1-mSms32KPwsQQmN6rgqxYDPFJquQ=",
"dev": true,
"requires": {
"boolbase": "~1.0.0"
}
@ -13355,14 +13208,6 @@
}
}
},
"parse5": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/parse5/-/parse5-3.0.3.tgz",
"integrity": "sha512-rgO9Zg5LLLkfJF9E6CCmXlSE4UVceloys8JrFqCcHloC3usd/kJCyPDwH2SOlzix2j3xaP9sUX3e8+kvkuleAA==",
"requires": {
"@types/node": "*"
}
},
"parseqs": {
"version": "0.0.5",
"resolved": "https://registry.npmjs.org/parseqs/-/parseqs-0.0.5.tgz",
@ -14832,6 +14677,7 @@
"version": "1.1.14",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz",
"integrity": "sha1-fPTFTvZI44EwhMY23SB54WbAgdk=",
"dev": true,
"requires": {
"core-util-is": "~1.0.0",
"inherits": "~2.0.1",
@ -16468,7 +16314,8 @@
"string_decoder": {
"version": "0.10.31",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz",
"integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ="
"integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=",
"dev": true
},
"stringstream": {
"version": "0.0.5",

View File

@ -2,7 +2,7 @@
"name": "rats-search",
"productName": "Rats on The Boat",
"description": "P2P BitTorrent search engine",
"version": "0.26.2",
"version": "0.28.0",
"private": true,
"author": "Alexey Kasyanchuk <degitx@gmail.com>",
"copyright": "Copyright © 2018 Alexey Kasyanchuk",
@ -111,6 +111,7 @@
"buildweb": "node src/background/webpack.js"
},
"dependencies": {
"ansi-256-colors": "^1.1.0",
"bencode": "^2.0.0",
"bitfield": "^2.0.0",
"cheerio": "^1.0.0-rc.2",
@ -156,7 +157,7 @@
"babel-plugin-transform-object-rest-spread": "^7.0.0-beta.3",
"chai": "^4.1.2",
"css-loader": "^0.28.11",
"electron": "2.0.5",
"electron": "2.0.6",
"electron-builder": "20.14.7",
"eslint": "^4.19.1",
"eslint-plugin-react": "^7.9.1",

View File

@ -247,6 +247,18 @@ export default class ConfigPage extends Page {
/>
<div className='fs0-75' style={{color: 'grey'}}>* {__('Enable torrents replication from another rats clients. Dont recomended if torrent scanner works correct')}.</div>
</div>
<div className='column w100p'>
<Toggle
style={{marginTop: '10px'}}
label={__('Check torrent files intergrity')}
toggled={this.options.recheckFilesOnAdding}
onToggle={(e, checked) => {
this.options.recheckFilesOnAdding = checked
this.forceUpdate()
}}
/>
<div className='fs0-75' style={{color: 'grey'}}>* {__('Enable database torrents files intergrity check on adding each torrent. Disable this will free some cpu usage on adding operation.')}</div>
</div>
<div style={{marginTop: 10}}>{__('Torrent network scanner settings')}:</div>

View File

@ -4,9 +4,11 @@ const compareVersions = require('compare-versions');
const getTorrent = require('./gettorrent')
const _ = require('lodash')
const asyncForEach = require('./asyncForEach')
const cpuUsage = require('./bt/cpu-usage-global')
module.exports = async ({
sphinx,
sphinxSingle,
send,
recive,
p2p,
@ -104,7 +106,7 @@ module.exports = async ({
sphinx.query('SELECT count(*) AS torrents, sum(size) AS sz FROM `torrents`', function (error, rows, fields) {
if(!rows) {
console.error(error)
logTE('statistic', error)
callback(undefined)
return;
}
@ -113,7 +115,7 @@ module.exports = async ({
sphinx.query('SELECT count(*) AS files FROM `files`', function (error, rows, fields) {
if(!rows) {
console.error(error)
logTE('statistic', error)
callback(undefined)
return;
}
@ -135,22 +137,22 @@ module.exports = async ({
// remote request
if(options.peer)
{
console.log('remote torrent request to peer')
logT('search', 'remote torrent request to peer')
const peer = p2p.find(options.peer)
if(!peer)
{
console.log('dont found requested peer in peers')
logT('search', 'dont found requested peer in peers')
callback(undefined)
return;
}
delete options.peer;
peer.emit('torrent', {hash, options}, (data, nil, address) => {
console.log('remote torrent result', hash)
logT('search', 'remote torrent result', hash)
callback(data)
if(compareVersions(address.version, '0.19.0') < 0)
{
console.log('replication selected torrent now works only with 0.19.0 version, ignore this torrent')
logT('search', 'replication selected torrent now works only with 0.19.0 version, ignore this torrent')
return
}
@ -184,7 +186,7 @@ module.exports = async ({
});
if(torrent.good != good || torrent.bad != bad)
{
console.log('finded new rating on', torrent.name, 'update votes to it')
logT('rating', 'finded new rating on', torrent.name, 'update votes to it')
torrent.good = good
torrent.bad = bad
updateTorrentToDB(torrent)
@ -202,13 +204,16 @@ module.exports = async ({
if(config.p2pReplicationServer)
{
console.log('p2p replication server enabled')
logT('replication', 'p2p replication server enabled')
p2p.on('randomTorrents', (nil, callback) => {
if(typeof callback != 'function')
return;
sphinx.query('SELECT * FROM `torrents` ORDER BY rand() limit 5', (error, torrents) => {
const cpu = cpuUsage()
const limit = Math.max(1, 5 - (cpu / 20) | 0)
sphinxSingle.query(`SELECT * FROM torrents ORDER BY rand() limit ${limit}`, (error, torrents) => {
if(!torrents || torrents.length == 0) {
callback(undefined)
return;
@ -222,7 +227,7 @@ module.exports = async ({
}
const inSql = Object.keys(hashes).map(hash => sphinx.escape(hash)).join(',');
sphinx.query(`SELECT * FROM files WHERE hash IN(${inSql}) limit 50000`, (error, files) => {
sphinxSingle.query(`SELECT * FROM files WHERE hash IN(${inSql}) limit 50000`, (error, files) => {
if(!files)
{
files = []
@ -248,14 +253,14 @@ module.exports = async ({
if(compareVersions(address.version, '0.19.0') < 0)
{
console.log('replication now works only with 0.19.0 version, ignore this torrent')
logT('replication', 'replication now works only with 0.19.0 version, ignore this torrent')
return
}
gotTorrents += torrents.length
torrents.forEach((torrent) => {
console.log('replicate remote torrent', torrent && torrent.name)
logT('replication', 'replicate remote torrent', torrent && torrent.name)
insertTorrentToDB(torrent)
})
})
@ -263,7 +268,7 @@ module.exports = async ({
setTimeout(() => getReplicationTorrents(gotTorrents > 8 ? gotTorrents * 600 : 10000), nextTimeout)
}
// start
console.log('replication client is enabled')
logT('replication', 'replication client is enabled')
getReplicationTorrents()
}
}
@ -338,13 +343,13 @@ module.exports = async ({
const isSHA1 = isSH1Hash(text)
sphinx.query('SELECT * FROM `torrents` WHERE ' + (isSHA1 ? 'hash = ?' : 'MATCH(?)') + ' ' + where + ' ' + order + ' LIMIT ?,?', args, function (error, rows, fields) {
if(!rows) {
console.log(error)
logT('search', error)
callback(undefined)
return;
}
if(rows.length === 0 && isSHA1 && !isP2P) // trying to get via dht
{
console.log('get torrent via infohash with dht')
logT('search', 'get torrent via infohash with dht')
torrentClient.getMetadata(text, (torrent) => {
searchList.push(baseRowData(torrent));
callback(searchList);
@ -363,7 +368,7 @@ module.exports = async ({
recive('searchTorrent', mergeTorrentsWithDownloadsFn((text, navigation, callback) => {
searchTorrentCall(text, navigation, callback)
p2p.emit('searchTorrent', {text, navigation}, (remote, socketObject) => {
console.log('remote search results', remote && remote.length)
logT('search', 'remote search results', remote && remote.length)
if(remote && remote.length > 0)
{
const { _socket: socket } = socketObject
@ -438,7 +443,7 @@ module.exports = async ({
//sphinx.query('SELECT * FROM `files` inner join torrents on(torrents.hash = files.hash) WHERE files.path like \'%' + text + '%\' ' + where + ' ' + order + ' LIMIT ?,?', args, function (error, rows, fields) {
sphinx.query('SELECT * FROM `files` WHERE MATCH(?) ' + where + ' ' + order + ' LIMIT ?,?', args, function (error, files, fields) {
if(!files) {
console.log(error)
logT('search', error)
callback(undefined)
return;
}
@ -458,7 +463,7 @@ module.exports = async ({
const inSql = Object.keys(search).map(hash => sphinx.escape(hash)).join(',');
sphinx.query(`SELECT * FROM torrents WHERE hash IN(${inSql})`, (err, torrents) => {
if(!torrents) {
console.log(err)
logT('search', err)
return;
}
@ -486,7 +491,7 @@ module.exports = async ({
recive('searchFiles', mergeTorrentsWithDownloadsFn((text, navigation, callback) => {
searchFilesCall(text, navigation, callback)
p2p.emit('searchFiles', {text, navigation}, (remote, socketObject) => {
console.log('remote search files results', remote && remote.length)
logT('search', 'remote search files results', remote && remote.length)
if(remote && remote.length > 0)
{
const { _socket: socket } = socketObject
@ -562,7 +567,7 @@ module.exports = async ({
{
topTorrentsCall(type, navigation, callback)
p2p.emit('topTorrents', {type, navigation}, (remote, socketObject) => {
console.log('remote top results', remote && remote.length)
logT('top', 'remote top results', remote && remote.length)
if(remote && remote.length > 0)
{
const { _socket: socket } = socketObject
@ -652,9 +657,9 @@ module.exports = async ({
torrentClient._add = (torrentObject, savePath, callback) =>
{
const magnet = `magnet:?xt=urn:btih:${torrentObject.hash}`
console.log('download', magnet)
logT('downloader', 'download', magnet)
if(torrentClient.get(magnet)) {
console.log('aready added')
logT('downloader', 'aready added')
if(callback)
callback(false)
return
@ -675,7 +680,7 @@ module.exports = async ({
}
torrent.on('ready', () => {
console.log('start downloading', torrent.infoHash, 'to', torrent.path)
logT('downloader', 'start downloading', torrent.infoHash, 'to', torrent.path)
send('downloading', torrent.infoHash)
progress(0) // immediately display progress
if(torrent._paused)
@ -686,7 +691,7 @@ module.exports = async ({
})
torrent.on('done', () => {
console.log('download done', torrent.infoHash)
logT('downloader', 'download done', torrent.infoHash)
progress(0) // update progress
// remove torrent if marked
if(torrent.removeOnDone)
@ -694,7 +699,7 @@ module.exports = async ({
torrentClient.remove(magnet, (err) => {
if(err)
{
console.log('download removing error', err)
logT('downloader', 'download removing error', err)
return
}
@ -719,7 +724,7 @@ module.exports = async ({
//custom api pause
torrent._pause = () => {
console.log('pause torrent downloading', torrent.infoHash)
logT('downloader', 'pause torrent downloading', torrent.infoHash)
torrent.pause()
torrent.wires = [];
setTimeout(() => {
@ -737,7 +742,7 @@ module.exports = async ({
}
torrent._resume = () => {
console.log('resume torrent downloading', torrent.infoHash)
logT('downloader', 'resume torrent downloading', torrent.infoHash)
torrent._restoreWires()
torrent.resume()
}
@ -767,13 +772,13 @@ module.exports = async ({
const id = torrentClientHashMap[hash]
if(!id)
{
console.log('cant find torrent for removing', hash)
logT('downloader', 'cant find torrent for removing', hash)
return
}
const torrent = torrentClient.get(id)
if(!torrent) {
console.log('no torrent for update founded')
logT('downloader', 'no torrent for update founded')
return
}
@ -802,7 +807,7 @@ module.exports = async ({
const id = torrentClientHashMap[hash]
if(!id)
{
console.log('cant find torrent for removing', hash)
logT('downloader', 'cant find torrent for removing', hash)
if(callback)
callback(false)
return
@ -811,7 +816,7 @@ module.exports = async ({
torrentClient.remove(id, (err) => {
if(err)
{
console.log('download removing error', err)
logT('downloader', 'download removing error', err)
if(callback)
callback(false)
return
@ -846,12 +851,12 @@ module.exports = async ({
return
removeProtect = true
console.log('checktorrents call')
logT('clean', 'checktorrents call')
const toRemove = []
const done = async () => {
console.log('torrents to remove founded', toRemove.length)
logT('clean', 'torrents to remove founded', toRemove.length)
if(checkOnly)
{
callback(toRemove.length)
@ -865,7 +870,7 @@ module.exports = async ({
})
callback(toRemove.length)
removeProtect = false
console.log('removed torrents by filter:', toRemove.length)
logT('clean', 'removed torrents by filter:', toRemove.length)
}
let i = 1
@ -960,7 +965,7 @@ module.exports = async ({
if(!myself)
{
console.log('replicate torrent from store record', torrent.hash)
logT('store', 'replicate torrent from store record', torrent.hash)
await insertTorrentToDB(torrent)
}
@ -1009,7 +1014,7 @@ module.exports = async ({
if(remoteFeed.feed.length > feed.size() || (remoteFeed.feed.length == feed.size() && remoteFeed.feedDate > feed.feedDate))
{
console.log('replace our feed with remote feed')
logT('feed', 'replace our feed with remote feed')
feed.feed = remoteFeed.feed
feed.feedDate = remoteFeed.feedDate || 0
// it can be new torrents replicate all

View File

@ -78,32 +78,57 @@ if (!fs.existsSync(app.getPath("userData"))){
const logFile = fs.createWriteStream(app.getPath("userData") + '/rats.log', {flags : 'w'});
const logStdout = process.stdout;
const colors = require('ansi-256-colors');
// Java-style 32-bit string hash: h = h * 31 + codeUnit, truncated to int32
// on every step. Deterministic for a given string; used to derive a stable
// terminal color index for each log tag.
const stringHashCode = (str) => {
  let hash = 0;
  for (let i = 0; i < str.length; i++) {
    // Math.imul(hash, 31) === ((hash << 5) - hash) modulo 2^32, so the
    // folded value matches the classic shift-and-subtract formulation.
    hash = (Math.imul(hash, 31) + str.charCodeAt(i)) | 0;
  }
  return hash;
};
// Replace console.log so every plain log line goes both to the on-disk
// rats.log (prefixed with a local timestamp) and to the original stdout.
console.log = (...args) => {
  const line = util.format(...args);
  const stamp = new Date().toLocaleTimeString();
  logFile.write(`[${stamp}] ${line}\n`);
  logStdout.write(`${line}\n`);
};
// Tagged logger: writes "[time] [type] message" to the log file and a
// colorized "[type] message" to stdout. The tag color is chosen
// deterministically from the tag's 32-bit string hash (mod 256).
global.logT = (type, ...args) => {
  const line = util.format(...args);
  const stamp = new Date().toLocaleTimeString();
  const tagColor = colors.fg.codes[Math.abs(stringHashCode(type)) % 256];
  logFile.write(`[${stamp}] [${type}] ${line}\n`);
  logStdout.write(tagColor + `[${type}]` + colors.reset + ' ' + line + '\n');
}
// Tagged error logger: like logT, but the file entry is surrounded by blank
// lines and marked [ERROR], and the stdout message body is rendered in
// color index 9 (red) after the hash-colored tag.
global.logTE = (type, ...args) => {
  const line = util.format(...args);
  const stamp = new Date().toLocaleTimeString();
  const tagColor = colors.fg.codes[Math.abs(stringHashCode(type)) % 256];
  logFile.write(`\n[${stamp}] [ERROR] [${type}] ` + line + '\n\n');
  logStdout.write(tagColor + `[${type}]` + colors.reset + ' ' + colors.fg.codes[9] + line + colors.reset + '\n');
}
// print os info
console.log('Rats', app.getVersion())
console.log('Platform:', os.platform())
console.log('Arch:', os.arch())
console.log('OS Release:', os.release())
console.log('CPU:', os.cpus()[0].model)
console.log('CPU Logic cores:', os.cpus().length)
console.log('Total memory:', (os.totalmem() / (1024 * 1024)).toFixed(2), 'MB')
console.log('Free memory:', (os.freemem() / (1024 * 1024)).toFixed(2), 'MB')
console.log('NodeJS:', process.version)
logT('system', 'Rats', app.getVersion())
logT('system', 'Platform:', os.platform())
logT('system', 'Arch:', os.arch())
logT('system', 'OS Release:', os.release())
logT('system', 'CPU:', os.cpus()[0].model)
logT('system', 'CPU Logic cores:', os.cpus().length)
logT('system', 'Total memory:', (os.totalmem() / (1024 * 1024)).toFixed(2), 'MB')
logT('system', 'Free memory:', (os.freemem() / (1024 * 1024)).toFixed(2), 'MB')
logT('system', 'NodeJS:', process.version)
if(portative)
console.log('portative compability')
logT('system', 'portative compability')
// handle promise rejections
process.on('unhandledRejection', r => console.log('Rejection:', r));
process.on('unhandledRejection', r => logTE('system', 'Rejection:', r));
const shouldQuit = app.makeSingleInstance(function(commandLine, workingDirectory) {
// Someone tried to run a second instance, we should focus our window.
console.log('openned second application, just focus this one')
logT('app', 'openned second application, just focus this one')
if (mainWindow) {
if (mainWindow.isMinimized())
mainWindow.restore();
@ -112,7 +137,7 @@ const shouldQuit = app.makeSingleInstance(function(commandLine, workingDirectory
});
if (shouldQuit) {
console.log('closed because of second application')
logT('app', 'closed because of second application')
app.exit(0);
}
@ -122,12 +147,12 @@ log.transports.file.level = false;
log.transports.console.level = false;
log.transports.console = function(msg) {
const text = util.format.apply(util, msg.data);
console.log(text);
logT('updater', text);
};
autoUpdater.logger = log;
autoUpdater.on('update-downloaded', () => {
console.log('update-downloaded lats quitAndInstall');
logT('updater', 'update-downloaded lats quitAndInstall');
if (env.name === "production") {
dialog.showMessageBox({
type: 'info',
@ -201,10 +226,7 @@ app.on("ready", () => {
} },
{ label: 'Quit', click: function(){
app.isQuiting = true;
if (sphinx)
stop()
else
app.quit()
} }
]);
@ -224,7 +246,7 @@ app.on("ready", () => {
checkInternet(enabled => {
if(!enabled)
{
console.log('no internet connection were founded, updater not started')
logT('updater', 'no internet connection were founded, updater not started')
return
}
@ -233,7 +255,7 @@ app.on("ready", () => {
autoUpdater.getUpdateInfo().then(info => {
if(info.version == app.getVersion())
{
console.log('update not founded for version', app.getVersion())
logT('updater', 'update not founded for version', app.getVersion())
return
}
@ -266,6 +288,7 @@ app.on("ready", () => {
{
const id = arg[arg.length - 1].callback
arg[arg.length - 1] = (responce) => {
if(mainWindow)
mainWindow.webContents.send('callback', id, responce)
}
}
@ -293,28 +316,54 @@ const stop = () => {
return
stopProtect = true
// hide on case of long exit, to prevent user clicks
if(mainWindow)
mainWindow.hide()
// bug with mac os tray closing
// https://github.com/electron/electron/issues/9982
// https://github.com/electron/electron/issues/13556
if(process.platform !== 'darwin')
{
if(tray)
tray.destroy()
}
if(spider)
{
spider.stop(() => sphinx.stop())
}
else
else if(sphinx)
{
sphinx.stop()
}
else
{
app.quit()
}
}
app.on("window-all-closed", () => {
if (sphinx)
stop()
else
app.quit()
});
app.on('before-quit', () => {
if(rl)
rl.close()
app.isQuiting = true
if (sphinx)
stop()
})
var rl = require("readline").createInterface({
input: process.stdin,
output: process.stdout
});
rl.on("SIGINT", function () {
process.emit("SIGINT");
});
process.on("SIGINT", () => {
stop()
});

View File

@ -0,0 +1,51 @@
const os = require("os");

// Snapshot the average idle time and average total tick time (sum of all
// time buckets) across every CPU core, in milliseconds of cumulative time.
function cpuAverage() {
  const cpus = os.cpus();
  let idleSum = 0;
  let tickSum = 0;
  for (const core of cpus) {
    // Sum every time bucket (user, nice, sys, idle, irq) for this core.
    for (const kind in core.times) {
      tickSum += core.times[kind];
    }
    idleSum += core.times.idle;
  }
  return { idle: idleSum / cpus.length, total: tickSum / cpus.length };
}

// Rolling whole-machine busy percentage, refreshed by the interval below.
// Stays 0 until the first 300ms sampling window completes.
let startMeasure = cpuAverage();
let percentageCPU = 0;

const cpuTimer = setInterval(() => {
  const endMeasure = cpuAverage();
  // Busy % over the last window = 100 - idle share of elapsed ticks.
  const idleDiff = endMeasure.idle - startMeasure.idle;
  const totalDiff = endMeasure.total - startMeasure.total;
  percentageCPU = 100 - ~~((100 * idleDiff) / totalDiff);
  startMeasure = endMeasure;
}, 300);
// Don't let the sampling timer keep the process alive at shutdown.
cpuTimer.unref();

// Accessor for the most recent busy percentage.
module.exports = () => percentageCPU;

View File

@ -285,7 +285,7 @@ class Spider extends Emiter {
this.udp = dgram.createSocket('udp4')
this.udp.bind(port)
this.udp.on('listening', () => {
console.log(`Listen DHT protocol on ${this.udp.address().address}:${this.udp.address().port}`)
logT('spider', `Listen DHT protocol on ${this.udp.address().address}:${this.udp.address().port}`)
})
this.udp.on('message', (data, addr) => {
this.parse(data, addr)

View File

@ -15,7 +15,7 @@ const requests = {};
let message = function (buf, host, port) {
server.send(buf, 0, buf.length, port, host, function(err, bytes) {
if (err) {
console.log(err.message);
logT('udp-tracker', err.message);
}
});
};
@ -69,7 +69,7 @@ let scrapeTorrent = function (connectionIdHigh, connectionIdLow, transactionId)
message(buffer, connection.host, connection.port);
} catch(error)
{
console.log('ERROR on scrape', error)
logT('udp-tracker', 'ERROR on scrape', error)
}
};
@ -112,7 +112,7 @@ server.on("message", function (msg, rinfo) {
delete requests[transactionId];
} else if (action === ACTION_ERROR) {
delete requests[transactionId];
console.log("error in scrape response");
logT('udp-tracker', "error in scrape response");
}
});
@ -125,7 +125,7 @@ let getPeersStatistic = (host, port, hash, callback) => {
server.on("listening", function () {
var address = server.address();
console.log("listening udp tracker respose on " + address.address + ":" + address.port);
logT('udp-tracker', "listening udp tracker respose on " + address.address + ":" + address.port);
});
server.bind(config.udpTrackersPort);

View File

@ -28,7 +28,7 @@ let config = {
sphinx: {
host : '127.0.0.1',
port : 9306,
connectionLimit: 12
connectionLimit: 10
},
spider: {
@ -59,6 +59,7 @@ let config = {
cleanupDiscLimit: 7 * 1024 * 1024 * 1024,
spaceQuota: false,
spaceDiskLimit: 7 * 1024 * 1024 * 1024,
recheckFilesOnAdding: true,
dbPath: '',

View File

@ -103,12 +103,12 @@ module.exports = async (callback, mainWindow, sphinxApp) => {
}
const patch = async (version) => {
console.log('db version', version)
logT('patcher', 'db version', version)
switch(version)
{
case 1:
{
console.log('patch db to version 2')
logT('patcher', 'patch db to version 2')
openPatchWindow()
let i = 1
@ -116,7 +116,7 @@ module.exports = async (callback, mainWindow, sphinxApp) => {
const files = (await sphinx.query("SELECT COUNT(*) AS c FROM files"))[0].c
await forBigTable(sphinx, 'torrents', async (torrent) => {
console.log('update index', torrent.id, torrent.name, '[', i, 'of', torrents, ']')
logT('patcher', 'update index', torrent.id, torrent.name, '[', i, 'of', torrents, ']')
if(patchWindow)
patchWindow.webContents.send('reindex', {field: torrent.name, index: i++, all: torrents, torrent: true})
@ -126,7 +126,7 @@ module.exports = async (callback, mainWindow, sphinxApp) => {
})
i = 1
await forBigTable(sphinx, 'files', async (file) => {
console.log('update index', file.id, file.path, '[', i, 'of', files, ']')
logT('patcher', 'update index', file.id, file.path, '[', i, 'of', files, ']')
if(patchWindow)
patchWindow.webContents.send('reindex', {field: file.path, index: i++, all: files})
@ -141,13 +141,13 @@ module.exports = async (callback, mainWindow, sphinxApp) => {
{
openPatchWindow()
console.log('optimizing torrents')
logT('patcher', 'optimizing torrents')
if(patchWindow)
patchWindow.webContents.send('optimize', {field: 'torrents'})
sphinx.query(`OPTIMIZE INDEX torrents`)
await sphinxApp.waitOptimized('torrents')
console.log('optimizing files')
logT('patcher', 'optimizing files')
if(patchWindow)
patchWindow.webContents.send('optimize', {field: 'files'})
sphinx.query(`OPTIMIZE INDEX files`)
@ -165,7 +165,7 @@ module.exports = async (callback, mainWindow, sphinxApp) => {
let i = 1
const torrents = (await sphinx.query("SELECT COUNT(*) AS c FROM torrents"))[0].c
await forBigTable(sphinx, 'torrents', async (torrent) => {
console.log('update index', torrent.id, torrent.name, '[', i, 'of', torrents, '] - delete:', bad)
logT('patcher', 'update index', torrent.id, torrent.name, '[', i, 'of', torrents, '] - delete:', bad)
if(patchWindow)
patchWindow.webContents.send('reindex', {field: torrent.name, index: i++, all: torrents, torrent: true})
@ -177,7 +177,7 @@ module.exports = async (callback, mainWindow, sphinxApp) => {
torrentTypeDetect(torrent, torrent.filesList)
if(torrent.contentType == 'bad')
{
console.log('remove bad torrent', torrent.name)
logT('patcher', 'remove bad torrent', torrent.name)
bad++
await sphinx.query(`DELETE FROM torrents WHERE hash = '${torrent.hash}'`)
await sphinx.query(`DELETE FROM files WHERE hash = '${torrent.hash}'`)
@ -185,7 +185,7 @@ module.exports = async (callback, mainWindow, sphinxApp) => {
}
})
console.log('removed', bad, 'torrents')
logT('patcher', 'removed', bad, 'torrents')
await setVersion(4)
}
@ -200,7 +200,7 @@ module.exports = async (callback, mainWindow, sphinxApp) => {
let patch = 1
await forBigTable(sphinx, 'torrents', async (torrent) => {
console.log('remember index', torrent.id, torrent.name, '[', i, 'of', torrents, ']')
logT('patcher', 'remember index', torrent.id, torrent.name, '[', i, 'of', torrents, ']')
if(patchWindow)
patchWindow.webContents.send('reindex', {field: torrent.name, index: i++, all: torrents, torrent: true})
@ -209,7 +209,7 @@ module.exports = async (callback, mainWindow, sphinxApp) => {
if(torrentsArray.length >= 20000)
{
fs.writeFileSync(`${sphinxApp.directoryPath}/torrents.patch.${patch++}`, JSON.stringify(torrentsArray, null, 4), 'utf8');
console.log('write torrents dump', `${sphinxApp.directoryPath}/torrents.patch.${patch-1}`)
logT('patcher', 'write torrents dump', `${sphinxApp.directoryPath}/torrents.patch.${patch-1}`)
torrentsArray = []
}
})
@ -217,7 +217,7 @@ module.exports = async (callback, mainWindow, sphinxApp) => {
if(torrentsArray.length > 0)
{
fs.writeFileSync(`${sphinxApp.directoryPath}/torrents.patch.${patch}`, JSON.stringify(torrentsArray, null, 4), 'utf8');
console.log('write torrents dump', `${sphinxApp.directoryPath}/torrents.patch.${patch}`)
logT('patcher', 'write torrents dump', `${sphinxApp.directoryPath}/torrents.patch.${patch}`)
torrentsArray = []
}
else
@ -232,19 +232,19 @@ module.exports = async (callback, mainWindow, sphinxApp) => {
sphinxApp.stop(resolve, true)
})
console.log('sphinx stoped for patching')
logT('patcher', 'sphinx stoped for patching')
await new Promise((resolve) => {
glob(`${sphinxApp.directoryPathDb}/torrents.*`, function (er, files) {
files.forEach(file => {
console.log('clear torrents file', file)
logT('patcher', 'clear torrents file', file)
fs.unlinkSync(path.resolve(file))
})
resolve()
})
})
console.log('cleaned torrents db structure, rectreating again')
logT('patcher', 'cleaned torrents db structure, rectreating again')
i = 1
await new Promise((resolve) => {
// reopen sphinx
@ -254,14 +254,14 @@ module.exports = async (callback, mainWindow, sphinxApp) => {
}) // same args
})
console.log('sphinx restarted, patch db now')
logT('patcher', 'sphinx restarted, patch db now')
for(let k = 1; k <= patch; k++)
{
torrentsArray = JSON.parse(fs.readFileSync(`${sphinxApp.directoryPath}/torrents.patch.${k}`, 'utf8'))
console.log('read torrents dump', `${sphinxApp.directoryPath}/torrents.patch.${k}`)
logT('patcher', 'read torrents dump', `${sphinxApp.directoryPath}/torrents.patch.${k}`)
await asyncForEach(torrentsArray, async (torrent) => {
console.log('update index', torrent.id, torrent.name, '[', i, 'of', torrents, ']')
logT('patcher', 'update index', torrent.id, torrent.name, '[', i, 'of', torrents, ']')
if(patchWindow)
patchWindow.webContents.send('reindex', {field: torrent.name, index: i++, all: torrents, torrent: true})
@ -274,7 +274,7 @@ module.exports = async (callback, mainWindow, sphinxApp) => {
await new Promise((resolve) => {
glob(`${sphinxApp.directoryPath}/torrents.patch.*`, function (er, files) {
files.forEach(file => {
console.log('clear dump file', file)
logT('patcher', 'clear dump file', file)
fs.unlinkSync(path.resolve(file))
})
resolve()
@ -283,7 +283,7 @@ module.exports = async (callback, mainWindow, sphinxApp) => {
torrentsArray = null
console.log('optimizing torrents')
logT('patcher', 'optimizing torrents')
if(patchWindow)
patchWindow.webContents.send('optimize', {field: 'torrents'})
sphinx.query(`OPTIMIZE INDEX torrents`)
@ -292,7 +292,7 @@ module.exports = async (callback, mainWindow, sphinxApp) => {
await setVersion(5)
}
}
console.log('db patch done')
logT('patcher', 'db patch done')
sphinx.destroy()
if(patchWindow)
{
@ -306,14 +306,14 @@ module.exports = async (callback, mainWindow, sphinxApp) => {
// init of db, we can set version to last
if(sphinxApp && sphinxApp.isInitDb)
{
console.log('new db, set version to last version', currentVersion)
logT('patcher', 'new db, set version to last version', currentVersion)
await setVersion(currentVersion)
}
sphinx.query('select * from version', async (err, version) => {
if(err)
{
console.log('error on version get on db patch')
logTE('patcher', 'error on version get on db patch')
return
}
@ -324,17 +324,17 @@ module.exports = async (callback, mainWindow, sphinxApp) => {
const ver = parseInt(fs.readFileSync(`${sphinxApp.directoryPath}/version.vrs`))
if(ver > 0)
{
console.log('readed version from version.vrs', ver)
logT('patcher', 'readed version from version.vrs', ver)
patch(ver)
}
else
{
console.log('error: bad version in version.vrs')
logT('patcher', 'error: bad version in version.vrs')
}
}
else
{
console.log('version not founded, set db version to 1')
logT('patcher', 'version not founded, set db version to 1')
await setVersion(1)
patch(1)
}

View File

@ -19,11 +19,11 @@ module.exports = class Feed {
if(!this.loaded)
return // feed not loaded on begining, ignore saving
console.log('saving feed')
logT('feed', 'saving feed')
config.feedDate = this.feedDate
await this.sphinx.query('delete from feed where id > 0')
let id = 0
return Promise.all(
await Promise.all(
this.feed.map(
async record => await this.sphinx.query('insert into feed(id, data) values(?, ?)', [++id, JSON.stringify(record)])
)
@ -40,12 +40,12 @@ module.exports = class Feed {
this._order()
this.feedDate = config.feedDate
this.loaded = true
console.log('lodead feed')
logT('feed', 'lodead feed')
}
clear()
{
console.log('clearing feed')
logT('feed', 'clearing feed')
this.feed = []
}

View File

@ -3,7 +3,7 @@ module.exports = (sphinx, table, callback, doneCallback, max = 1000, where = '')
sphinx.query(`SELECT * FROM ${table} WHERE id > ${index} ${where} LIMIT ${max}`, (err, torrents) => {
const finish = () => {
if(err)
console.log('big table parse error', err)
logTE('sql', 'big table parse error', err)
if(doneCallback)
doneCallback(true)
done(true)

View File

@ -2,7 +2,15 @@ const mysql = require('mysql');
const config = require('./config');
const expand = (sphinx) => {
const queryCall = sphinx.query.bind(sphinx)
const queryOriginal = sphinx.query.bind(sphinx)
const queryCall = (...args) => {
if(sphinx.__closed)
{
logT('sql', 'prevent sql request after end of connection')
return
}
return queryOriginal(...args)
}
sphinx.query = (sql, args, callback) => new Promise((resolve, reject) => {
if(typeof args === 'function' || typeof args === 'undefined')
@ -113,19 +121,81 @@ const expand = (sphinx) => {
return sphinx
}
const pool = () => {
const pool = async () => {
if(/^win/.test(process.platform))
{
logT('sql', 'using main pool mechanism')
let sphinx = mysql.createPool({
connectionLimit: config.sphinx.connectionLimit,
// bug under mac with some problems on big connection size, limit this to very low value on mac os x
connectionLimit: process.platform === 'darwin' ? 3 : config.sphinx.connectionLimit,
host : config.sphinx.host,
port : config.sphinx.port
});
return expand(sphinx)
sphinx = expand(sphinx)
const end = sphinx.end.bind(sphinx)
sphinx.end = (cb) => new Promise(resolve => {
sphinx.__closed = true
end(() => {
resolve()
if(cb) cb()
})
})
return sphinx
}
else
{
logT('sql', 'using alternative pool mechanism')
let connectionPool = []
let connectionsLimit = config.sphinx.connectionLimit
let currentConnection = 0
for(let i = 0; i < connectionsLimit; i++)
{
connectionPool[i] = await single().waitConnection()
}
const buildPoolMethod = (name, ...args) => {
if(!connectionPool)
return
const data = connectionPool[currentConnection][name](...args)
currentConnection = (currentConnection + 1) % connectionsLimit
return data
}
return new Proxy({
query(...args) {
return buildPoolMethod('query', ...args)
},
insertValues(...args) {
return buildPoolMethod('insertValues', ...args)
},
updateValues(...args) {
return buildPoolMethod('updateValues', ...args)
},
async end(cb)
{
await Promise.all(connectionPool.map(conn => conn.end()))
if(cb)
cb()
connectionPool = null
}
}, {
get(target, prop)
{
if(!target[prop])
{
return connectionPool[0][prop]
}
return target[prop]
}
})
}
}
let mysqlSingle = {
const single = (callback) => {
let mysqlSingle = {
_mysql: null
};
const proxySingle = new Proxy(mysqlSingle, {
};
const proxySingle = new Proxy(mysqlSingle, {
get(target, prop) {
if(!target[prop])
{
@ -136,8 +206,10 @@ const proxySingle = new Proxy(mysqlSingle, {
}
return target[prop]
}
})
const single = (callback) => {
})
const start = () =>
{
mysqlSingle._mysql = mysql.createConnection({
host : config.sphinx.host,
port : config.sphinx.port
@ -151,7 +223,7 @@ const single = (callback) => {
mysqlSingle._mysql.connect((mysqlError) => {
if (mysqlError) {
console.error('error connecting: ' + mysqlError.stack);
logT('sql', 'error connecting: ' + mysqlError.stack);
return;
}
@ -162,17 +234,33 @@ const single = (callback) => {
});
mysqlSingle._mysql.on('error', (err) => {
console.log('db error', err);
if(err.code === 'PROTOCOL_CONNECTION_LOST') { // Connection to the MySQL server is usually
logT('sql', 'lost connection, restart single sql connection')
mysqlSingle._mysql = undefined
single(); // lost due to either server restart, or a
start(); // lost due to either server restart, or a
} else { // connnection idle timeout (the wait_timeout
logTE('sql', 'db error', err);
throw err; // server variable configures this)
}
});
mysqlSingle._mysql = expand(mysqlSingle._mysql)
// fix prevent query after closing
const end = mysqlSingle._mysql.end.bind(mysqlSingle._mysql)
mysqlSingle._mysql.end = (cb) => new Promise(resolve => {
mysqlSingle._mysql.__closed = true
end(() => {
resolve()
if(cb)
cb()
})
})
return proxySingle
}
return start()
}
module.exports = {pool, single}

View File

@ -23,7 +23,7 @@ class p2p {
this.info = {}
if(!config.peerId)
{
console.log('generate peerId')
logT('p2p', 'generate peerId')
config.peerId = Math.random().toString(36).substring(2, 15) + Math.random().toString(36).substring(2, 15)
}
this.peerId = config.peerId;
@ -44,7 +44,7 @@ class p2p {
this.tcpServer.on('connection', (socket) => {
this.tcpServer.getConnections((err,con) => {
console.log('server connected', con, 'max', this.tcpServer.maxConnections)
logT('p2p', 'server connected', con, 'max', this.tcpServer.maxConnections)
})
socket = new JsonSocket(socket);
this.clients.push(socket)
@ -108,7 +108,7 @@ class p2p {
// new peer with peer exchange
this.on('peer', (peer) => {
console.log('got peer exchange', peer)
logT('p2p', 'got peer exchange', peer)
this.add(peer)
})
@ -124,7 +124,7 @@ class p2p {
if (alias >= 1) {
// nothing
} else {
console.log('ignore local address', iface.address);
logT('p2p', 'ignore local address', iface.address);
this.ignore(iface.address)
}
++alias;
@ -133,29 +133,32 @@ class p2p {
}
listen() {
console.log('listen p2p on', config.spiderPort, 'port')
logT('p2p', 'listen p2p on', config.spiderPort, 'port')
this.tcpServer.listen(config.spiderPort, '0.0.0.0');
}
checkPortAndRedirect(address, port) {
isPortReachable(port, {host: address}).then((isAvailable) => {
if(this.closing)
return // responce can be very late, and ssh can start after closing of program, this will break on linux
this.p2pStatus = isAvailable ? 2 : 0
this.send('p2pStatus', this.p2pStatus)
// all ok don't need to start any ssh tunnels
if(isAvailable)
{
console.log('tcp p2p port is reachable - all ok')
logT('ssh', 'tcp p2p port is reachable - all ok')
return;
}
else
{
console.log('tcp p2p port is unreachable - try ssh tunnel')
logT('ssh', 'tcp p2p port is unreachable - try ssh tunnel')
}
if(!this.encryptor)
{
console.error('something wrong with encryptor')
logT('ssh', 'something wrong with encryptor')
return
}
@ -170,7 +173,7 @@ class p2p {
return
}
console.log('ssh tunnel success, redirect peers to ssh')
logT('ssh', 'ssh tunnel success, redirect peers to ssh')
this.p2pStatus = 1
this.send('p2pStatus', this.p2pStatus)
@ -183,9 +186,10 @@ class p2p {
close()
{
this.closing = true
if(this.ssh)
{
console.log('closing ssh...')
logT('ssh', 'closing ssh...')
this.ssh.kill()
}
// close server
@ -312,7 +316,7 @@ class p2p {
torrents: data.info ? data.info.torrents || 0 : 0
})
this.events.emit('peer', address)
console.log('new peer', address)
logT('p2p', 'new peer', address)
// add some other peers
if(data.peers && data.peers.length > 0)
@ -338,7 +342,7 @@ class p2p {
}
this.peers.splice(index, 1);
console.log('close peer connection', address)
logT('p2p', 'close peer connection', address)
}
})

View File

@ -14,21 +14,45 @@ const os = require('os')
const packageJson = JSON.parse(fs.readFileSync('package.json', 'utf8'));
const util = require('util');
const colors = require('ansi-256-colors');
const stringHashCode = (str) => {
let hash = 0, i, chr;
if (str.length === 0)
return hash;
for (i = 0; i < str.length; i++) {
chr = str.charCodeAt(i);
hash = ((hash << 5) - hash) + chr;
hash |= 0; // Convert to 32bit integer
}
return hash;
};
global.logT = (type, ...d) => {
console.log(colors.fg.codes[Math.abs(stringHashCode(type)) % 256] + `[${type}]` + colors.reset + ' ' + util.format(...d));
}
global.logTE = (type, ...d) => {
console.log(colors.fg.codes[Math.abs(stringHashCode(type)) % 256] + `[${type}]` + colors.reset + ' ' + colors.fg.codes[9] + util.format(...d) + colors.reset + '\n');
}
server.listen(appConfig.httpPort);
console.log('Listening web server on', appConfig.httpPort, 'port')
console.log('Platform:', os.platform())
console.log('Arch:', os.arch())
console.log('OS Release:', os.release())
console.log('CPU:', os.cpus()[0].model)
console.log('CPU Logic cores:', os.cpus().length)
console.log('Total memory:', (os.totalmem() / (1024 * 1024)).toFixed(2), 'MB')
console.log('Free memory:', (os.freemem() / (1024 * 1024)).toFixed(2), 'MB')
console.log('NodeJS:', process.version)
logT('system', 'Rats v' + packageJson.version)
logT('system', 'Listening web server on', appConfig.httpPort, 'port')
logT('system', 'Platform:', os.platform())
logT('system', 'Arch:', os.arch())
logT('system', 'OS Release:', os.release())
logT('system', 'CPU:', os.cpus()[0].model)
logT('system', 'CPU Logic cores:', os.cpus().length)
logT('system', 'Total memory:', (os.totalmem() / (1024 * 1024)).toFixed(2), 'MB')
logT('system', 'Free memory:', (os.freemem() / (1024 * 1024)).toFixed(2), 'MB')
logT('system', 'NodeJS:', process.version)
const majorVersion = /v?([0-9]+)\.?([0-9]+)?\.?([0-9]+)?\.?([0-9]+)?/.exec(process.version)[1]
if(majorVersion < 8)
{
console.log('Minumum Node.JS version >= 8.0.0, please update and try again')
logTE('system', 'Minumum Node.JS version >= 8.0.0, please update and try again')
process.exit(1);
}
@ -63,6 +87,7 @@ rl.on("SIGINT", function () {
});
process.on("SIGINT", () => {
rl.close()
if(spider)
{
spider.stop(() => sphinx.stop(() => process.exit()))

View File

@ -136,8 +136,8 @@ const writeSphinxConfig = (path, dbPath) => {
config = iconv.encode(config, 'win1251')
fs.writeFileSync(`${path}/sphinx.conf`, config)
console.log(`writed sphinx config to ${path}`)
console.log('db path:', dbPath)
logT('sphinx', `writed sphinx config to ${path}`)
logT('sphinx', 'db path:', dbPath)
return {isInitDb}
}
@ -146,7 +146,7 @@ module.exports = (callback, dataDirectory, onClose) => {
const start = (callback) => {
const sphinxPath = path.resolve(appPath('searchd'))
console.log('Sphinx Path:', sphinxPath)
logT('sphinx', 'Sphinx Path:', sphinxPath)
const sphinxConfigDirectory = dataDirectory
appConfig['dbPath'] = appConfig.dbPath && appConfig.dbPath.length > 0 ? appConfig.dbPath : sphinxConfigDirectory;
@ -174,14 +174,14 @@ module.exports = (callback, dataDirectory, onClose) => {
const optimizeResolvers = {}
sphinx.stdout.on('data', (data) => {
console.log(`sphinx: ${data}`)
logT('sphinx', `sphinx: ${data}`)
// don't listen if we are in fixing mode
if(sphinx.fixing)
return
if (data.includes('accepting connections')) {
console.log('catched sphinx start')
logT('sphinx', 'catched sphinx start')
if(callback)
callback()
}
@ -196,14 +196,14 @@ module.exports = (callback, dataDirectory, onClose) => {
{
if(optimizeResolvers[checkOptimized[1]])
{
console.log('resolve optimizer', checkOptimized[1])
logT('sphinx', 'resolve optimizer', checkOptimized[1])
optimizeResolvers[checkOptimized[1]]()
}
}
})
sphinx.on('close', (code, signal) => {
console.log(`sphinx closed with code ${code} and signal ${signal}`)
logT('sphinx', `sphinx closed with code ${code} and signal ${signal}`)
if(onClose && !sphinx.replaceOnClose) // sometime we don't want to call default callback
onClose()
if(sphinx.onClose)
@ -211,7 +211,7 @@ module.exports = (callback, dataDirectory, onClose) => {
})
sphinx.stop = (onFinish, replaceFinish) => {
console.log('sphinx closing...')
logT('sphinx', 'sphinx closing...')
if(onFinish)
sphinx.onClose = onFinish
if(replaceFinish)
@ -234,7 +234,7 @@ module.exports = (callback, dataDirectory, onClose) => {
// close db
await new Promise((resolve) => {
sphinx.stop(resolve, true)
console.log('revent start')
logT('sphinx', 'revent start')
})
const checkNullFile = (file) => new Promise((resolve) => {
@ -258,7 +258,7 @@ module.exports = (callback, dataDirectory, onClose) => {
brokenFiles = probablyCoruptedFiles.filter((file, index) => !brokenFiles[index])
brokenFiles.forEach(file => {
console.log('FIXDB: clean file because of broken', file)
logT('sphinx', 'FIXDB: clean file because of broken', file)
fs.unlinkSync(file)
})

View File

@ -45,7 +45,7 @@ module.exports = function (send, recive, dataDirectory, version, env)
let filesId = 1;
const events = new EventEmitter
let sphinx = pool();
let sphinx = await pool();
// initialize p2p
const p2p = new P2PServer(send)
@ -73,114 +73,17 @@ module.exports = function (send, recive, dataDirectory, version, env)
}
]
let mysqlSingle = single((mysqlSingle) => {
mysqlSingle.query("SELECT MAX(`id`) as mx from torrents", (err, rows) => {
if(err)
return
const sphinxSingle = await single().waitConnection()
torrentsId = (await sphinxSingle.query("SELECT MAX(`id`) as mx from torrents"))[0]
torrentsId = ((torrentsId && torrentsId.mx) || 0) + 1
filesId = (await sphinxSingle.query("SELECT MAX(`id`) as mx from files"))[0]
filesId = ((filesId && filesId.mx) || 0) + 1
p2p.info.torrents = (await sphinxSingle.query("SELECT COUNT(*) as cnt from torrents"))[0].cnt
p2p.info.files = (await sphinxSingle.query("SELECT COUNT(*) as cnt from files"))[0].cnt
const sphinxSingleAlternative = await single().waitConnection()
if(rows[0] && rows[0].mx >= 1)
torrentsId = rows[0].mx + 1;
})
mysqlSingle.query("SELECT COUNT(*) as cnt from torrents", (err, rows) => {
if(err)
return
p2p.info.torrents = rows[0].cnt
})
mysqlSingle.query("SELECT MAX(`id`) as mx from files", (err, rows) => {
if(err)
return
if(rows[0] &&rows[0].mx >= 1)
filesId = rows[0].mx + 1;
})
mysqlSingle.query("SELECT COUNT(*) as cnt from files", (err, rows) => {
if(err)
return
p2p.info.files = rows[0].cnt
})
});
/*
app.use(express.static('build', {index: false}));
app.get('/sitemap.xml', function(req, res) {
sphinx.query('SELECT count(*) as cnt FROM `torrents` WHERE contentCategory != \'xxx\' OR contentCategory IS NULL', function (error, rows, fields) {
if(!rows) {
return;
}
let urls = []
for(let i = 0; i < Math.ceil(rows[0].cnt / config.sitemapMaxSize); i++)
urls.push(`http://${config.domain}/sitemap${i+1}.xml`);
res.header('Content-Type', 'application/xml');
res.send( sm.buildSitemapIndex({
urls
}));
});
});
app.get('/sitemap:id.xml', function(req, res) {
if(req.params.id < 1)
return;
let page = (req.params.id - 1) * config.sitemapMaxSize
sphinx.query('SELECT hash FROM `torrents` WHERE contentCategory != \'xxx\' OR contentCategory IS NULL LIMIT ?, ?', [page, config.sitemapMaxSize], function (error, rows, fields) {
if(!rows) {
return;
}
let sitemap = sm.createSitemap ({
hostname: 'http://' + config.domain,
cacheTime: 600000
});
sitemap.add({url: '/'});
for(let i = 0; i < rows.length; i++)
sitemap.add({url: '/torrent/' + rows[i].hash});
sitemap.toXML( function (err, xml) {
if (err) {
return res.status(500).end();
}
res.header('Content-Type', 'application/xml');
res.send( xml );
});
});
});
app.get('*', function(req, res)
{
if(typeof req.query['_escaped_fragment_'] != 'undefined')
{
let program = phantomjs.exec('phantom.js', 'http://' + config.domain + req.path)
let body = '';
let timeout = setTimeout(() => {
program.kill();
}, 45000)
program.stderr.pipe(process.stderr)
program.stdout.on('data', (chunk) => {
body += chunk;
});
program.on('exit', code => {
clearTimeout(timeout);
res.header('Content-Type', 'text/html');
res.send( body );
})
return;
}
res.sendfile(__dirname + '/build/index.html');
});
*/
// start
function baseRowData(row)
{
return {
@ -210,7 +113,7 @@ app.get('*', function(req, res)
if(peers && peers.length > 0)
{
peers.forEach(peer => p2p.add(peer))
console.log('loaded', peers.length, 'peers')
logT('p2p', 'loaded', peers.length, 'peers')
}
}
@ -226,7 +129,7 @@ app.get('*', function(req, res)
resolve(data.length > 0 && JSON.parse(data))
});
}).on("error", (err) => {
console.log(`${url} error: ` + err.message)
logTE('http', `${url} error: ` + err.message)
resolve(false)
});
})
@ -242,7 +145,7 @@ app.get('*', function(req, res)
if(peers && peers.length > 0)
{
peers.forEach(peer => p2p.add(peer))
console.log('loaded', peers.length, 'peers from bootstrap')
logT('p2p', 'loaded', peers.length, 'peers from bootstrap')
}
}
if(json.bootstrapMap)
@ -262,7 +165,7 @@ app.get('*', function(req, res)
}
}
}
console.log('loaded peers map from bootstrap')
logT('p2p', 'loaded peers map from bootstrap')
}
}
@ -281,7 +184,7 @@ app.get('*', function(req, res)
p2pBootstrapLoop = setInterval(() => {
if(p2p.size === 0)
{
console.log('load peers from bootstap again because no peers at this moment')
logT('p2p', 'load peers from bootstap again because no peers at this moment')
loadBootstrap()
}
}, 90000) // try to load new peers if there is no one found
@ -289,7 +192,7 @@ app.get('*', function(req, res)
const updateTorrentTrackers = (hash) => {
let maxSeeders = 0, maxLeechers = 0, maxCompleted = 0;
mysqlSingle.query('UPDATE torrents SET trackersChecked = ? WHERE hash = ?', [Math.floor(Date.now() / 1000), hash], (err, result) => {
sphinxSingle.query('UPDATE torrents SET trackersChecked = ? WHERE hash = ?', [Math.floor(Date.now() / 1000), hash], (err, result) => {
if(!result) {
console.error(err);
return
@ -320,9 +223,9 @@ app.get('*', function(req, res)
maxCompleted = completed;
let checkTime = new Date();
mysqlSingle.query('UPDATE torrents SET seeders = ?, completed = ?, leechers = ?, trackersChecked = ? WHERE hash = ?', [seeders, completed, leechers, Math.floor(checkTime.getTime() / 1000), hash], function(err, result) {
sphinxSingle.query('UPDATE torrents SET seeders = ?, completed = ?, leechers = ?, trackersChecked = ? WHERE hash = ?', [seeders, completed, leechers, Math.floor(checkTime.getTime() / 1000), hash], function(err, result) {
if(!result) {
console.error(err);
logTE('udp-tracker', err);
return
}
@ -352,7 +255,7 @@ app.get('*', function(req, res)
if(free < config.cleanupDiscLimit)
{
mysqlSingle.query(`SELECT * FROM torrents WHERE added < DATE_SUB(NOW(), INTERVAL 6 hour) ORDER BY seeders ASC, files DESC, leechers ASC, completed ASC LIMIT ${cleanTorrents}`, function(err, torrents) {
sphinxSingle.query(`SELECT * FROM torrents WHERE added < DATE_SUB(NOW(), INTERVAL 6 hour) ORDER BY seeders ASC, files DESC, leechers ASC, completed ASC LIMIT ${cleanTorrents}`, function(err, torrents) {
if(!torrents)
return;
@ -364,8 +267,8 @@ app.get('*', function(req, res)
cleanupDebug('cleanup torrent', torrent.name, '[seeders', torrent.seeders, ', files', torrent.files, ']', 'free', (free / (1024 * 1024)) + "mb");
mysqlSingle.query('DELETE FROM files WHERE hash = ?', torrent.hash);
mysqlSingle.query('DELETE FROM torrents WHERE hash = ?', torrent.hash);
sphinxSingle.query('DELETE FROM files WHERE hash = ?', torrent.hash);
sphinxSingle.query('DELETE FROM torrents WHERE hash = ?', torrent.hash);
})
});
}
@ -379,7 +282,7 @@ app.get('*', function(req, res)
const checkTorrent = (torrent) => {
if(config.filters.maxFiles > 0 && torrent.files > config.filters.maxFiles)
{
console.log('ignore', torrent.name, 'because files', torrent.files, '>', config.filters.maxFiles)
logT('check', 'ignore', torrent.name, 'because files', torrent.files, '>', config.filters.maxFiles)
return false
}
@ -389,37 +292,37 @@ app.get('*', function(req, res)
const rx = new RegExp(nameRX)
if(!config.filters.namingRegExpNegative && !rx.test(torrent.name))
{
console.log('ignore', torrent.name, 'by naming rx')
logT('check', 'ignore', torrent.name, 'by naming rx')
return false
}
else if(config.filters.namingRegExpNegative && rx.test(torrent.name))
{
console.log('ignore', torrent.name, 'by naming rx negative')
logT('check', 'ignore', torrent.name, 'by naming rx negative')
return false
}
}
if(torrent.contentType === 'bad')
{
console.log('ignore torrent', torrent.name, 'because this is a bad thing')
logT('check', 'ignore torrent', torrent.name, 'because this is a bad thing')
return false
}
if(config.filters.adultFilter && torrent.contentCategory === 'xxx')
{
console.log('ignore torrent', torrent.name, 'because adult filter')
logT('check', 'ignore torrent', torrent.name, 'because adult filter')
return false
}
if(config.filters.sizeEnabled && (torrent.size < config.filters.size.min || torrent.size > config.filters.size.max))
{
console.log('ignore torrent', torrent.name, 'because size bounds of', torrent.size, ':', config.filters.size)
logT('check', 'ignore torrent', torrent.name, 'because size bounds of', torrent.size, ':', config.filters.size)
return false
}
if(config.filters.contentType && Array.isArray(config.filters.contentType) && !config.filters.contentType.includes(torrent.contentType))
{
console.log('ignore torrent', torrent.name, 'because type', torrent.contentType, 'not in:', config.filters.contentType)
logT('check', 'ignore torrent', torrent.name, 'because type', torrent.contentType, 'not in:', config.filters.contentType)
return false
}
@ -474,30 +377,76 @@ app.get('*', function(req, res)
if(!filesList || filesList.length == 0)
{
console.log('skip torrent', torrent.name, '- no filesList')
logT('add', 'skip torrent', torrent.name, '- no filesList')
resolve()
return
}
torrent.id = torrentsId++;
mysqlSingle.query("SELECT id FROM torrents WHERE hash = ?", torrent.hash, (err, single) => {
const recheckFiles = (callback) => {
sphinxSingle.query('SELECT count(*) as files_count FROM files WHERE hash = ?', [torrent.hash], function(err, rows) {
if(!rows)
return
const db_files = rows[0]['files_count'];
if(db_files !== torrent.files)
{
callback()
}
})
}
const addFilesToDatabase = () => {
sphinxSingle.query('DELETE FROM files WHERE hash = ?', torrent.hash, function (err, result) {
if(err)
{
return;
}
filesList.forEach((file) => {
file.id = filesId++;
file.pathIndex = file.path;
});
sphinxSingle.insertValues('files', filesList, function(err, result) {
if(!result) {
console.error(err);
return
}
if(!silent)
send('filesReady', torrent.hash);
});
})
}
sphinxSingle.query("SELECT id FROM torrents WHERE hash = ?", torrent.hash, (err, single) => {
if(!single)
{
console.log(err)
logTE('add', err)
resolve()
return
}
// torrent already probably in db
if(single.length > 0)
{
if(config.recheckFilesOnAdding)
{
// recheck files and if they not ok add their to database
recheckFiles(addFilesToDatabase)
}
resolve()
return
}
else
{
addFilesToDatabase()
}
torrent.nameIndex = torrent.name
mysqlSingle.insertValues('torrents', torrent, function(err, result) {
sphinxSingle.insertValues('torrents', torrent, function(err, result) {
if(result) {
if(!silent)
send('newTorrent', {
@ -513,49 +462,19 @@ app.get('*', function(req, res)
}
else
{
console.log(torrent);
console.error(err);
logTE('add', err);
}
resolve()
events.emit('insert', torrent)
});
})
mysqlSingle.query('SELECT count(*) as files_count FROM files WHERE hash = ?', [torrent.hash], function(err, rows) {
if(!rows)
return
const db_files = rows[0]['files_count'];
if(db_files !== torrent.files)
{
mysqlSingle.query('DELETE FROM files WHERE hash = ?', torrent.hash, function (err, result) {
if(err)
{
return;
}
filesList.forEach((file) => {
file.id = filesId++;
file.pathIndex = file.path;
});
mysqlSingle.insertValues('files', filesList, function(err, result) {
if(!result) {
console.error(err);
return
}
if(!silent)
send('filesReady', torrent.hash);
});
})
}
})
})
const removeTorrentFromDB = async (torrent) => {
const {hash} = torrent
await mysqlSingle.query('DELETE FROM torrents WHERE hash = ?', hash)
await mysqlSingle.query('DELETE FROM files WHERE hash = ?', hash)
await sphinxSingle.query('DELETE FROM torrents WHERE hash = ?', hash)
await sphinxSingle.query('DELETE FROM files WHERE hash = ?', hash)
logT('remove', 'removed torrent', torrent.name || torrent.hash)
}
const updateTorrentToDB = async (torrent) => {
@ -571,11 +490,12 @@ app.get('*', function(req, res)
delete torrent.id
delete torrent.filesList
await mysqlSingle.updateValues('torrents', torrent, {hash: torrent.hash})
await sphinxSingle.updateValues('torrents', torrent, {hash: torrent.hash})
logT('update', 'updated torrent', torrent.name)
}
const insertMetadata = (metadata, infohash, rinfo) => {
console.log('finded torrent', metadata.info.name, ' and add to database');
logT('spider', 'finded torrent', metadata.info.name, ' and add to database');
const bufferToString = (buffer) => Buffer.isBuffer(buffer) ? buffer.toString() : buffer
@ -598,19 +518,19 @@ app.get('*', function(req, res)
for(let i = 0; i < metadata.info.files.length; i++)
{
let file = metadata.info.files[i];
let filePath = bufferToString(file.path).join('/');
let filePath = bufferToString(file['path.utf-8'] || file.path).join('/');
filesAdd(filePath, file.length);
size += file.length;
}
}
else
{
filesAdd(bufferToString(metadata.info.name), size)
filesAdd(bufferToString(metadata.info['name.utf-8'] || metadata.info.name), size)
}
const torrentQ = {
hash: hash,
name: bufferToString(metadata.info.name),
name: bufferToString(metadata.info['name.utf-8'] || metadata.info.name),
size: size,
files: filesCount,
piecelength: metadata.info['piece length'],
@ -632,7 +552,7 @@ app.get('*', function(req, res)
{
disk.check(rootPath, function(err, info) {
if (err) {
console.log(err);
logTE('quota', err);
} else {
const {available, free, total} = info;
@ -675,7 +595,7 @@ app.get('*', function(req, res)
}
recive('dropTorrents', (pathTorrents) => {
console.log('drop torrents and replicate from original')
logT('drop', 'drop torrents and replicate from original')
const torrents = pathTorrents.map(path => parseTorrent(fs.readFileSync(path)))
torrents.forEach(torrent => insertMetadata(torrent, torrent.infoHashBuffer, {address: '127.0.0.1', port: 666}))
})
@ -691,7 +611,7 @@ app.get('*', function(req, res)
const {address, port} = stunMsg.getAttribute(STUN_ATTR_XOR_MAPPED_ADDRESS).value
stunServer.close()
console.log('p2p stun ignore my address', address)
logT('stun', 'p2p stun ignore my address', address)
p2p.ignore(address)
// check port avalibility
@ -713,7 +633,7 @@ app.get('*', function(req, res)
ttl: 0
}, function(err) {
if(err)
console.log('upnp server dont respond')
logT('upnp', 'upnp server dont respond')
});
upnp.portMapping({
public: config.spiderPort,
@ -723,7 +643,7 @@ app.get('*', function(req, res)
ttl: 0
}, function(err) {
if(err)
console.log('upnp server dont respond')
logT('upnp', 'upnp server dont respond')
});
upnp.portMapping({
public: config.udpTrackersPort,
@ -733,7 +653,7 @@ app.get('*', function(req, res)
ttl: 0
}, function(err) {
if(err)
console.log('upnp server dont respond')
logT('upnp', 'upnp server dont respond')
});
}
@ -758,7 +678,7 @@ app.get('*', function(req, res)
if(err)
return
console.log('p2p upnp ignore my address', ip)
logT('upnp', 'p2p upnp ignore my address', ip)
p2p.ignore(ip)
});
}
@ -774,6 +694,7 @@ app.get('*', function(req, res)
// setup api
await API({
sphinx,
sphinxSingle: sphinxSingleAlternative,
recive,
send,
p2p,
@ -812,17 +733,20 @@ app.get('*', function(req, res)
}
// load torrents sessions
console.log('restore downloading sessions')
logT('downloader', 'restore downloading sessions')
torrentClient.loadSession(dataDirectory + '/downloads.json')
this.stop = async (callback) => {
this.closing = true
console.log('spider closing...')
logT('close', 'spider closing...')
if(upnp)
upnp.ratsUnmap()
logT('close', 'closing alternative db interface')
await sphinxSingleAlternative.end()
// save torrents sessions
console.log('save torrents downloads sessions')
logT('close', 'save torrents downloads sessions')
torrentClient.saveSession(dataDirectory + '/downloads.json')
// save feed
@ -832,7 +756,7 @@ app.get('*', function(req, res)
if(config.p2pBootstrap && p2pBootstrapLoop)
{
clearInterval(p2pBootstrapLoop)
console.log('bootstrap loop stoped')
logT('close', 'bootstrap loop stoped')
}
// safe future peers
@ -843,7 +767,7 @@ app.get('*', function(req, res)
if(addresses.length > 0)
{
fs.writeFileSync(dataDirectory + '/peers.p2p', peersEncripted, 'utf8');
console.log('local peers saved')
logT('close', 'local peers saved')
}
if(config.p2pBootstrap)
@ -881,7 +805,7 @@ app.get('*', function(req, res)
'Content-Type' : "application/json",
}
};
console.log('bootstrap peers saved to', host)
logT('close', 'bootstrap peers saved to', host)
const req = http.request(options, resolve);
req.on('error', resolve)
req.end(JSON.stringify({
@ -897,7 +821,7 @@ app.get('*', function(req, res)
}
}
console.log('closing p2p...')
logT('close', 'closing p2p...')
// don't listen spider peer appears
spider.removeAllListeners('peer')
await p2p.close()
@ -905,13 +829,15 @@ app.get('*', function(req, res)
// don't listen complete torrent responses
client.removeAllListeners('complete')
torrentClient.destroy(() => {
sphinx.end(() => spider.close(() => {
mysqlSingle.destroy()
console.log('spider closed')
logT('close', 'closing torrent client')
torrentClient.destroy(() => spider.close(async () => {
await sphinx.end()
logT('close', 'pool closed')
await sphinxSingle.end()
logT('close', 'single closed')
logT('close', 'spider closed')
callback()
}))
})
}
})()

View File

@ -46,7 +46,7 @@ const startSSH = (port, host, user, password, callback) => {
}
ssh.stdout.on('data', (data) => {
console.log(`ssh: ${data}`)
logT('ssh', `ssh: ${data}`)
checkMessage(data)
if(data.includes('Store key in cache?'))
{
@ -56,7 +56,7 @@ const startSSH = (port, host, user, password, callback) => {
})
ssh.stderr.on('data', (data) => {
console.log(`ssh error: ${data}`);
logT('ssh', `ssh error: ${data}`);
checkMessage(data)
if(data.includes('Password authentication failed'))
{
@ -70,7 +70,7 @@ const startSSH = (port, host, user, password, callback) => {
});
ssh.on('close', (code, signal) => {
console.log(`ssh closed with code ${code} and signal ${signal}`)
logT('ssh', `ssh closed with code ${code} and signal ${signal}`)
if(callback)
callback(false)
})

View File

@ -13,7 +13,7 @@ module.exports = class P2PStore extends EventEmitter {
});
this.synchronized = false
console.log('connect p2p store...')
logT('store', 'connect p2p store...')
this.p2p = p2p
this.sphinx = sphinx
@ -24,7 +24,7 @@ module.exports = class P2PStore extends EventEmitter {
if(rows[0] && rows[0].mx >= 1)
this.id = rows[0].mx;
console.log('store db index', this.id)
logT('store', 'store db index', this.id)
this.p2p.events.on('peer', (peer) => {
if(peer.info && peer.info.store)
@ -40,7 +40,7 @@ module.exports = class P2PStore extends EventEmitter {
this.p2p.on('dbStore', (record) => {
if(!record || record.id - 1 !== this.id)
{
console.log('out of range peerdb store', record.id)
logT('store', 'out of range peerdb store', record.id)
return
}
@ -51,7 +51,7 @@ module.exports = class P2PStore extends EventEmitter {
})
this.p2p.on('dbSync', ({id} = {}, callback) => {
console.log('ask to sync db from', id, 'version')
logT('store', 'ask to sync db from', id, 'version')
if(typeof id === 'undefined' || id >= this.id)
{
callback(false)
@ -62,7 +62,7 @@ module.exports = class P2PStore extends EventEmitter {
this.sphinx.query(`select * from store where id > ${id}`, (err, records) => {
if(err)
{
console.log(err)
logT('store', err)
return
}
@ -77,7 +77,7 @@ module.exports = class P2PStore extends EventEmitter {
sync(peer)
{
console.log('sync db on version', this.id, peer ? `from peer ${peer.peerId}` : '')
logT('store', 'sync db on version', this.id, peer ? `from peer ${peer.peerId}` : '')
const processSync = (data, nil, peer) => {
if(!data || !data.records)
return
@ -90,7 +90,7 @@ module.exports = class P2PStore extends EventEmitter {
&& oldIndex < this.id // last sync update of store must be successful, otherwise no point to try sync db from this peer
&& this.id < data.index)
{
console.log('continue sync store from', this.id, 'index', 'peer', peer.peerId)
logT('store', 'continue sync store from', this.id, 'index', 'peer', peer.peerId)
peer.emit('dbSync', {id: this.id}, processSync)
}
}
@ -119,7 +119,7 @@ module.exports = class P2PStore extends EventEmitter {
// check hash
if(objectHash(record.data) !== record.hash)
{
console.log('wrong hash for sync peerdb')
logT('store', 'wrong hash for sync peerdb')
return
}
@ -127,7 +127,7 @@ module.exports = class P2PStore extends EventEmitter {
record.myself = false
// push to db
console.log('sync peerdb record', record.id)
logT('store', 'sync peerdb record', record.id)
this._pushToDb(record)
this.id = record.id
@ -145,7 +145,7 @@ module.exports = class P2PStore extends EventEmitter {
(err) => {
if(err)
{
console.log(err)
logTE('store', err)
return
}
@ -159,7 +159,7 @@ module.exports = class P2PStore extends EventEmitter {
{
if(!this.synchronized)
{
console.log('cant store item on unsync db')
logT('store', 'cant store item on unsync db')
return false
}
@ -177,7 +177,7 @@ module.exports = class P2PStore extends EventEmitter {
temp
}
console.log('store object', value.id)
logT('store', 'store object', value.id)
this._pushToDb(value, () => {
// store record

View File

@ -17,7 +17,7 @@ torrentClient.saveSession = (sessionFile) => {
torrentClient.loadSession = (sessionFile) => {
if(!fs.existsSync(sessionFile))
{
console.log('no download sessions - ignore')
logT('downloader', 'no download sessions - ignore')
return
}
@ -25,26 +25,26 @@ torrentClient.loadSession = (sessionFile) => {
const obj = JSON.parse(data);
if(!obj.torrents)
{
console.log('no torrents list for loading session')
logT('downloader', 'no torrents list for loading session')
return
}
if(!torrentClient._add)
{
console.log('no overriden _add() method')
logT('downloader', 'no overriden _add() method')
return
}
const {torrents} = obj
torrents.forEach(({torrent, infoHash, path, removeOnDone, paused}) => {
if(!torrent || !infoHash || !path)
{
console.log('no info for starting download this torrent')
logT('downloader', 'no info for starting download this torrent')
return
}
console.log('restore download session:', torrent.name)
logT('downloader', 'restore download session:', torrent.name)
const download = torrentClient._add(torrent, path)
if(download)
{
console.log('restore options')
logT('downloader', 'restore options')
// restore options
download.removeOnDone = removeOnDone
if(paused)

View File

@ -8,8 +8,8 @@ const forBigTable = require('../src/background/forBigTable')
describe("big table for check", () => {
let sphinx;
it("init", function() {
sphinx = pool()
it("init", async function() {
sphinx = await pool()
expect(sphinx)
})
@ -36,4 +36,8 @@ describe("big table for check", () => {
await forBigTable(sphinx, 'feed', record => records.push(record), null, 15)
expect(records.length === 13)
})
it("close", async function() {
await sphinx.end()
})
});

View File

@ -1,4 +1,6 @@
import {startApplication, stopApplication} from "../tests/application";
global.logT = (...args) => {console.log(...args)}
global.logTE = (...args) => {console.log('error', ...args)}
describe("application", () => {
before(startApplication);

View File

@ -1,4 +1,4 @@
import { expect } from "chai";
import { expect, assert } from "chai";
const mysql = require('mysql')
const config = require('../src/background/config')
@ -61,14 +61,28 @@ describe("sphinx", () => {
})
it("query limit", function(done) {
const sphinx = pool()
const test = async () => {
const sphinx = await pool()
let promises = []
sphinx.query(`delete from feed where id >= 0`, () => {
for(let i = 0; i < 500; i++)
promises.push(sphinx.query(`insert into feed(id, data) values(${i}, 'a')`))
Promise.all(promises).then(() => {
sphinx.query(`delete from feed where id >= 0`, () => done())
sphinx.query(`delete from feed where id >= 0`, async () => {
await sphinx.end()
done()
})
})
})
}
test()
})
it("escape", function () {
assert.equal(sphinx.escape(`naru'to`), `'naru\\'to'`)
})
it("close pool", function(done) {
sphinx.end(done)
})
});

View File

@ -185,6 +185,8 @@
"calculation": "calculation",
"removing": "removing",
"Torrents cleaned": "Torrents cleaned",
"or with hash": "or with hash"
"or with hash": "or with hash",
"Check torrent files intergrity": "Check torrent files intergrity",
"Enable database torrents files intergrity check on adding each torrent. Disable this will free some cpu usage on adding operation.": "Enable database torrents files intergrity check on adding each torrent. Disable this will free some cpu usage on adding operation."
}
}

View File

@ -185,6 +185,8 @@
"calculation": "подсчитывается",
"removing": "удаляется",
"Torrents cleaned": "Торренты очещены",
"or with hash": "или по хэшу"
"or with hash": "или по хэшу",
"Check torrent files intergrity": "Проверка целостности файлов",
"Enable database torrents files intergrity check on adding each torrent. Disable this will free some cpu usage on adding operation.": "Включить проверку целостности файлов в базе при добавлении каждого торрента. Отключение этой опции освободит некоторорое количество ресурсов процессора при добавлении."
}
}

View File

@ -185,6 +185,8 @@
"calculation": "calculation",
"removing": "removing",
"Torrents cleaned": "Torrents cleaned",
"or with hash": "or with hash"
"or with hash": "or with hash",
"Check torrent files intergrity": "Check torrent files intergrity",
"Enable database torrents files intergrity check on adding each torrent. Disable this will free some cpu usage on adding operation.": "Enable database torrents files intergrity check on adding each torrent. Disable this will free some cpu usage on adding operation."
}
}