perf(db): optimize some tables to stored_only_fields to reduce memory usage of big databases #152

This commit is contained in:
Alexey Kasyanchuk 2021-09-02 01:57:33 +03:00
parent 83a78c4e6c
commit d65f214d4d
4 changed files with 15 additions and 14 deletions

View File

@ -251,7 +251,6 @@ module.exports = async (callback, mainWindow, sphinxApp) => {
if(patchWindow)
patchWindow.webContents.send('reindex', {field: file.path, index: i++, all: files})
file.pathIndex = file.path
await sphinx.query(`DELETE FROM files WHERE id = ${file.id}`)
await sphinx.insertValues('files', file)
})
@ -389,7 +388,6 @@ module.exports = async (callback, mainWindow, sphinxApp) => {
id: newId++,
hash,
path: filesMap[hash][0].path,
pathIndex: filesMap[hash][0].path,
size_new: filesMap[hash][0].size.toString()
});
logT('patcher', 'patched file', fileIndex, 'from', count, 'hash', hash, 'cIndex', ++hashCount);
@ -437,7 +435,7 @@ module.exports = async (callback, mainWindow, sphinxApp) => {
return
file.size = file.size_new.toString();
delete file.size_new;
await sphinx.replaceValues('files', file, {particial: false, sphinxIndex: {pathIndex: 'path'}});
await sphinx.replaceValues('files', file, {particial: false});
if(patchWindow)
patchWindow.webContents.send('reindex', {field: file.id, index: fileIndex, all: count, longTime: false, canBreak: true})
logT('patcher', 'restore patched file', fileIndex++, 'from', count, 'hash', file.hash);

View File

@ -48,10 +48,10 @@ const writeSphinxConfig = async (rootPath, dbPath, params = {}) => {
rt_attr_uint = files
rt_attr_uint = piecelength
rt_attr_timestamp = added
rt_attr_string = ipv4
rt_field = ipv4
rt_attr_uint = port
rt_attr_string = contentType
rt_attr_string = contentCategory
rt_field = contentType
rt_field = contentCategory
rt_attr_uint = seeders
rt_attr_uint = leechers
rt_attr_uint = completed
@ -60,19 +60,23 @@ const writeSphinxConfig = async (rootPath, dbPath, params = {}) => {
rt_attr_uint = bad
rt_attr_json = info
stored_only_fields = contentType, contentCategory, ipv4
ngram_len = 1
ngram_chars = U+3000..U+2FA1F
}
index files
{
type = rt
path = ${dbPath}/database/files
type = rt
path = ${dbPath}/database/files
rt_attr_string = path
rt_field = pathIndex
rt_field = path
rt_attr_string = hash
rt_attr_string = size
rt_field = size
stored_fields = path
stored_only_fields = size
}
index version

View File

@ -606,7 +606,6 @@ module.exports = function (send, recive, dataDirectory, version, env)
id: torrent.id,
hash: torrent.hash,
path,
pathIndex: path,
size
}, function(err, result) {
if(!result) {

View File

@ -17,11 +17,11 @@ describe("sphinx", () => {
})
it("insert", function(done) {
sphinx.query("INSERT INTO files(id, hash, path, pathIndex, size) VALUES(50001, 'a', 'bashaa', 'bashaa', '50')", (err) => {
sphinx.query("INSERT INTO files(id, hash, path, size) VALUES(50001, 'a', 'bashaa', '50')", (err) => {
if(err)
throw new Error(err)
sphinx.query("INSERT INTO files(id, hash, path, pathIndex, size) VALUES(50002, 'b', 'biotu', 'biotu', '30')", (err) => {
sphinx.query("INSERT INTO files(id, hash, path, size) VALUES(50002, 'b', 'biotu', '30')", (err) => {
if(err)
throw new Error(err)