Compare commits

...

9 Commits

SHA1 Message Date
de68cb23dd Index more data 2023-10-21 11:18:39 +08:00
c07211706b Index more data 2023-10-21 11:05:25 +08:00
f232c612bf Index more data 2023-10-20 18:02:44 +08:00
86ebef7b5c Index more data 2023-10-20 17:37:44 +08:00
499bc19be5 Index more data 2023-10-20 15:40:29 +08:00
e1c57cb63d Index more data 2023-10-19 18:52:55 +08:00
dcbbae4603 Optimize search length 2023-09-06 10:04:32 +08:00
954af8537b Optimize 2023-09-06 09:58:44 +08:00
eb7ce30da9 setTimeout 2023-09-06 00:00:14 +08:00
4 changed files with 1167 additions and 38 deletions

View File

@@ -3,7 +3,6 @@ ARG DEBIAN_FRONTEND=noninteractive
 RUN mkdir -p /home/node/app/node_modules && chown -R node:node /home/node/app
 WORKDIR /home/node/app
 COPY --chown=node:node . .
-RUN npm install -g npm
 USER node
 RUN npm install --force
@@ -11,4 +10,4 @@ RUN ls -la
 RUN npm run buildweb
 EXPOSE 8095
 CMD [ "node", "src/background/server.js" ]

View File

@@ -15,7 +15,8 @@ window.__ = __
 if(typeof WEB !== 'undefined')
 {
 	const io = require("socket.io-client");
-	window.torrentSocket = io(document.location.protocol + '//' + document.location.hostname + (process.env.NODE_ENV === 'production' ? '/' : ':8095/'));
+	// window.torrentSocket = io(document.location.protocol + '//' + document.location.hostname + (process.env.NODE_ENV === 'production' ? '/' : ':8095/'));
+	window.torrentSocket = io(document.location.protocol + '//' + document.location.host + '/');
 	const emit = window.torrentSocket.emit.bind(window.torrentSocket);
 	window.torrentSocket.emit = (...data) => {
 		let id;
@@ -85,7 +86,7 @@ else
 	ipcRenderer.on('url', (event, url) => {
 		console.log('url', url)
 		router(url)
 	});
 }
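The switch from document.location.hostname to document.location.host means the web client now connects back to whatever host and port served the page, rather than hard-coding :8095 outside of production. A small self-contained sketch of the URL the new line builds (the location values below are assumed examples, not taken from the diff):

// Assumed example: the page was served from https://example.com:8095
const location = { protocol: 'https:', hostname: 'example.com', host: 'example.com:8095' }
// Old form: the port depended on NODE_ENV, so dev and production built different URLs
const oldUrl = location.protocol + '//' + location.hostname + (process.env.NODE_ENV === 'production' ? '/' : ':8095/')
// New form: host already carries the port, so the same URL works in both cases
const newUrl = location.protocol + '//' + location.host + '/'
console.log(oldUrl, newUrl) // e.g. 'https://example.com:8095/' 'https://example.com:8095/'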

View File

@@ -56,7 +56,7 @@ module.exports = async ({
 			downloaded: download.downloaded,
 			progress: download.progress,
 			downloadSpeed: download.downloadSpeed,
 			removeOnDone: download.removeOnDone,
 			paused: torrent.paused || torrent._paused
 		}
@@ -79,10 +79,10 @@ module.exports = async ({
 		return torrents
 	}
 	const mergeTorrentsWithDownloadsFn = (Fn, copy) => (...args) => {
 		const callback = args[args.length - 2]
 		const rest = args.slice(0, -2)
 		Fn(...rest, (data) => callback(mergeTorrentsWithDownloads(data, copy)), args[args.length - 1])
 	}
 	const downloadFilesList = (torrent) => torrent.files.map((file, index) => ({
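The mergeTorrentsWithDownloadsFn wrapper above assumes the callback sits second-to-last in the handler's argument list and rewrites it so results are merged with live download state before they reach the caller. A self-contained sketch of that wrapping pattern (every name below is an illustrative stub, not the project's real handler):

// Stub merge step: the real one would attach progress/downloadSpeed to matching torrents.
const mergeTorrentsWithDownloads = (data, copy) => data
const mergeTorrentsWithDownloadsFn = (Fn, copy) => (...args) => {
	const callback = args[args.length - 2]        // callback is the second-to-last argument
	const rest = args.slice(0, -2)
	Fn(...rest, (data) => callback(mergeTorrentsWithDownloads(data, copy)), args[args.length - 1])
}
// Hypothetical handler with a (text, options, callback, extra) shape:
const searchHandler = (text, options, callback, extra) => callback([{ hash: 'abc', name: text }])
mergeTorrentsWithDownloadsFn(searchHandler)('ubuntu', {}, (result) => console.log(result), null)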
@@ -215,7 +215,7 @@ module.exports = async ({
 	p2p.on('randomTorrents', (nil, callback) => {
 		if(typeof callback != 'function')
 			return;
 		const cpu = cpuUsage()
 		const limit = Math.max(1, 5 - (cpu / 20) | 0)
@@ -224,19 +224,19 @@ module.exports = async ({
 			callback(undefined)
 			return;
 		}
 		let hashes = {}
 		for(const torrent of torrents)
 		{
 			delete torrent.id
 			hashes[torrent.hash] = torrent
 		}
 		const inSql = Object.keys(hashes).map(hash => sphinx.escape(hash)).join(',');
 		sphinxSingle.query(`SELECT * FROM files WHERE hash IN(${inSql})`, (error, files) => {
 			for(const file of files)
 				hashes[file.hash].filesList = parseTorrentFiles(file);
 			callback(Object.values(hashes))
 		})
 	})
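A quick worked example of the limit formula in the randomTorrents handler: | binds looser than -, so 5 - (cpu / 20) | 0 truncates toward zero, and Math.max then clamps the result to at least 1.

// cpu =   0 -> 5 - 0   = 5    -> limit 5
// cpu =  50 -> 5 - 2.5 = 2.5  -> truncated to 2
// cpu = 100 -> 5 - 5   = 0    -> clamped to 1
const limitFor = (cpu) => Math.max(1, 5 - (cpu / 20) | 0)
console.log([0, 50, 100].map(limitFor)) // [ 5, 2, 1 ]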
@@ -285,7 +285,7 @@ module.exports = async ({
 		if(typeof callback != 'function')
 			return;
-		if(!text || text.length <= 2) {
+		if(!text || text.length < 2) {
 			callback(undefined);
 			return;
 		}
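Relaxing the guard from <= 2 to < 2 means two-character searches are no longer rejected; only empty and single-character queries short-circuit. A minimal illustration of the boundary:

const rejected = (text) => !text || text.length < 2   // the new guard
console.log(rejected(''), rejected('a'), rejected('ab')) // true true false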
@@ -347,7 +347,7 @@ module.exports = async ({
 		{
 			logT('search', 'get torrent via infohash with dht')
 			// 3 try to get torrent from metadata
-			const getTorrentMetadata = (tryCount = 4) => {
+			const getTorrentMetadata = (tryCount = 8) => {
 				if(tryCount <= 0) {
 					logT('search', 'dht NOT found anything with dht', text);
 					return
@@ -356,7 +356,7 @@ module.exports = async ({
 				dhtCheckTimeout = setTimeout(() => {
 					lock = true
 					getTorrentMetadata(--tryCount)
-				}, 8000);
+				}, 16000);
 				torrentClient.getMetadata(text, (torrent) => {
 					if(lock) {
 						logT('search', 'this dht response not actual for', text);
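The two edits above double both the retry budget (4 to 8 attempts) and the per-attempt timeout (8 s to 16 s), so a DHT metadata lookup can now keep probing for roughly two minutes instead of about half a minute. A self-contained sketch of the same retry-with-timeout shape (the lookup function and its timings are stand-ins, not the project's torrentClient API):

// Schedule a timeout that marks the current attempt stale and recurses;
// ignore any late response once the attempt has been superseded.
const fetchWithRetries = (lookup, tryCount = 8, timeoutMs = 16000, onResult = console.log) => {
	if(tryCount <= 0)
		return                                  // retry budget spent, give up
	let stale = false
	const timer = setTimeout(() => {
		stale = true                            // this attempt timed out
		fetchWithRetries(lookup, tryCount - 1, timeoutMs, onResult)
	}, timeoutMs)
	lookup((result) => {
		if(stale)
			return                              // a newer attempt owns the result now
		clearTimeout(timer)
		onResult(result)
	})
}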
@@ -407,7 +407,7 @@ module.exports = async ({
 		if(typeof callback != 'function')
 			return;
-		if(!text || text.length <= 2) {
+		if(!text || text.length < 2) {
 			callback(undefined);
 			return;
 		}
@@ -455,7 +455,7 @@ module.exports = async ({
 		for(const torrent of torrents)
 		{
 			search[torrent.hash] = Object.assign(baseRowData(torrent), search[torrent.hash])
 			// temporary ignore adult content in search (workaroud)
 			if(safeSearch && search[torrent.hash].contentCategory == 'xxx')
 				delete search[torrent.hash]
@@ -531,7 +531,7 @@ module.exports = async ({
 				where += ' and `added` > ' + (Math.floor(Date.now() / 1000) - (60 * 60 * 24 * 30))
 			}
 		}
 		const query = `SELECT * FROM torrents WHERE seeders > 0 and contentCategory != ${torrentCategoryId('xxx')} ${where} ORDER BY seeders DESC LIMIT ${index},${limit}`;
 		if(topCache[query])
 		{
@@ -543,7 +543,7 @@ module.exports = async ({
 				callback(undefined)
 				return;
 			}
 			rows = rows.map((row) => baseRowData(row));
 			topCache[query] = rows;
 			callback(rows);
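For orientation, the top-torrents query is cached keyed by the full SQL string, so identical parameters hit topCache instead of Sphinx. A hedged sketch of what one assembled query might look like (the category id, index and limit values are made up for illustration):

// Illustrative values only: assumes torrentCategoryId('xxx') resolved to 5, index = 0, limit = 20,
// and the optional 30-day freshness filter was appended to `where`.
const where = ' and `added` > ' + (Math.floor(Date.now() / 1000) - (60 * 60 * 24 * 30))
const index = 0, limit = 20
const query = `SELECT * FROM torrents WHERE seeders > 0 and contentCategory != 5 ${where} ORDER BY seeders DESC LIMIT ${index},${limit}`
// The exact string doubles as the cache key, memoizing the mapped rows for this page of results.
console.log(query)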
@@ -641,7 +641,7 @@ module.exports = async ({
 		delete copyConfig['load'];
 		delete copyConfig['reload'];
 		send('configChanged', copyConfig)
 		if(typeof callback === 'function')
 			callback(true)
 	});
@@ -697,7 +697,7 @@ module.exports = async ({
 			send('filesReady', torrent.infoHash, downloadFilesList(torrent))
 		})
 		torrent.on('done', () => {
 			logT('downloader', 'download done', torrent.infoHash)
 			progress(0) // update progress
 			// remove torrent if marked
@@ -709,7 +709,7 @@ module.exports = async ({
 					logT('downloader', 'download removing error', err)
 					return
 				}
 				delete torrentClientHashMap[torrent.infoHash]
 				send('downloadDone', torrent.infoHash)
 			})
@@ -793,10 +793,10 @@ module.exports = async ({
 			}
 			torrent.updateFilesSelection()
 		}
 		torrent.updateFilesSelection = () => {
 			torrent.deselect(0, torrent.pieces.length - 1, false)
 			for(const file of torrent.files)
 			{
 				const {selected} = file
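The updateFilesSelection block follows the usual webtorrent-style idiom: deselect every piece first, then re-select only the files the user wants. A self-contained sketch of that idiom (the torrent and file objects below are stand-ins, not the project's downloader):

const applySelection = (torrent) => {
	torrent.deselect(0, torrent.pieces.length - 1, false)  // drop all pieces first
	for(const file of torrent.files)
	{
		if(file.selected)
			file.select()                                   // re-enable only the wanted files
		else
			file.deselect()
	}
}
// Minimal stubs so the sketch runs standalone:
const stubFile = (selected) => ({ selected, select: () => console.log('select'), deselect: () => console.log('deselect') })
applySelection({ pieces: new Array(8), files: [stubFile(true), stubFile(false)], deselect: () => console.log('deselect all') })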
@@ -825,7 +825,7 @@ module.exports = async ({
 			logT('downloader', 'cant find torrent for removing', hash)
 			return
 		}
 		const torrent = torrentClient.get(id)
 		if(!torrent) {
 			logT('downloader', 'no torrent for update founded')
@@ -913,7 +913,7 @@ module.exports = async ({
 			downloaded: torrent.downloaded,
 			progress: torrent.progress,
 			downloadSpeed: torrent.downloadSpeed,
 			removeOnDone: torrent.removeOnDone,
 			paused: torrent.paused || torrent._paused
 		})))
@@ -1018,11 +1018,11 @@ module.exports = async ({
 	// store torrent to feed
 	await feed.load()
 	Object.defineProperty(p2p.info, 'feed', {
 		enumerable: true,
 		get: () => feed.size()
 	});
 	Object.defineProperty(p2p.info, 'feedDate', {
 		enumerable: true,
 		get: () => feed.feedDate
 	});
@@ -1032,9 +1032,9 @@ module.exports = async ({
 		if(!temp || !temp.torrent)
 			return
 		const { torrent } = temp
 		if(torrent.hash !== record.torrentHash)
 			return
@@ -1053,7 +1053,7 @@ module.exports = async ({
 		// update feed only on some good info
 		if(torrent.good < 1)
 			return
 		feed.add(torrent)
 		send('feedUpdate', {
 			feed: feed.feed
@@ -1083,7 +1083,7 @@ module.exports = async ({
 		peer.emit('feed', null, (remoteFeed) => {
 			if(!remoteFeed)
 				return
 			if(Array.isArray(remoteFeed) || !remoteFeed.feed)
 				return // old version call
@@ -1091,7 +1091,7 @@ module.exports = async ({
 				logT('feed', 'remote feed have more torrent that needed: ', remoteFeed.feed.length, ' > ', feed.max);
 				remoteFeed.feed = remoteFeed.feed.slice(0, feed.max);
 			}
 			if(remoteFeed.feed.length > feed.size() || (remoteFeed.feed.length == feed.size() && remoteFeed.feedDate > feed.feedDate))
 			{
 				logT('feed', 'replace our feed with remote feed')
@@ -1107,5 +1107,5 @@ module.exports = async ({
 			}
 		}
 	})
 }
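The feed-sync branch near the end adopts a peer's feed only when it is strictly larger, or the same size but newer. A small self-contained illustration of that rule (feed shapes are simplified stand-ins for feed.size() and feed.feedDate):

const shouldReplace = (local, remote) =>
	remote.feed.length > local.feed.length ||
	(remote.feed.length == local.feed.length && remote.feedDate > local.feedDate)

const local = { feed: [1, 2, 3], feedDate: 1000 }
console.log(shouldReplace(local, { feed: [1, 2, 3, 4], feedDate: 900 }))  // true  (larger)
console.log(shouldReplace(local, { feed: [1, 2, 3], feedDate: 1500 }))    // true  (same size, newer)
console.log(shouldReplace(local, { feed: [1, 2], feedDate: 2000 }))       // false (smaller)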

File diff suppressed because it is too large