fix(db): use an alternative pool mechanism under mac and linux
this should fix the test breakage and the pool-closing stub
commit 297baac3d3
parent 725632e709
@@ -113,14 +113,70 @@ const expand = (sphinx) => {
 	return sphinx
 }
 
-const pool = () => {
-	let sphinx = mysql.createPool({
-		// bug under mac with some problems on big connection size, limit this to very low value on mac os x
-		connectionLimit: process.platform === 'darwin' ? 3 : config.sphinx.connectionLimit,
-		host : config.sphinx.host,
-		port : config.sphinx.port
-	});
-	return expand(sphinx)
+const pool = async () => {
+	if(/^win/.test(process.platform))
+	{
+		logT('sql', 'using main pool mechanism')
+		let sphinx = mysql.createPool({
+			// bug under mac with some problems on big connection size, limit this to very low value on mac os x
+			connectionLimit: process.platform === 'darwin' ? 3 : config.sphinx.connectionLimit,
+			host : config.sphinx.host,
+			port : config.sphinx.port
+		});
+		sphinx = expand(sphinx)
+		const end = sphinx.end.bind(sphinx)
+		sphinx.end = async (cb) => new Promise(resolve => end(() => {
+			resolve()
+			if(cb) cb()
+		}))
+		return sphinx
+	}
+	else
+	{
+		logT('sql', 'using alternative pool mechanism')
+		let connectionPool = []
+		let connectionsLimit = config.sphinx.connectionLimit
+		let currentConnection = 0
+		for(let i = 0; i < connectionsLimit; i++)
+		{
+			connectionPool[i] = await single().waitConnection()
+		}
+		const buildPoolMethod = (name, ...args) => {
+			if(!connectionPool)
+				return
+
+			const data = connectionPool[currentConnection][name](...args)
+			currentConnection = (currentConnection + 1) % connectionsLimit
+			return data
+		}
+		return new Proxy({
+			query(...args) {
+				return buildPoolMethod('query', ...args)
+			},
+			insertValues(...args) {
+				return buildPoolMethod('insertValues', ...args)
+			},
+			updateValues(...args) {
+				return buildPoolMethod('updateValues', ...args)
+			},
+			async end(cb)
+			{
+				await Promise.all(connectionPool.map(conn => new Promise(resolve => conn.end(resolve))))
+				if(cb)
+					cb()
+				connectionPool = null
+			}
+		}, {
+			get(target, prop)
+			{
+				if(!target[prop])
+				{
+					return connectionPool[0][prop]
+				}
+				return target[prop]
+			}
+		})
+	}
 }
 
 const single = (callback) => {
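Stripped of the repo specifics, the non-Windows branch is a hand-rolled pool: open a fixed number of single connections up front, send each wrapped call to the next connection in round-robin order, and let a Proxy forward any other member (escape and friends) to the first connection. A minimal sketch of that pattern, with a hypothetical makeConnection() factory standing in for single().waitConnection():

	// Minimal sketch of the round-robin mechanism above, not the verbatim source.
	// makeConnection() is a hypothetical factory standing in for single().waitConnection().
	const roundRobinPool = async (makeConnection, limit) => {
		let connections = []
		for(let i = 0; i < limit; i++)
			connections[i] = await makeConnection()

		let current = 0
		const dispatch = (name, ...args) => {
			if(!connections)
				return // pool already ended
			const result = connections[current][name](...args)
			current = (current + 1) % limit // advance to the next connection
			return result
		}

		return new Proxy({
			query(...args) {
				return dispatch('query', ...args)
			},
			async end(cb) {
				// wait for every underlying connection to close before resolving
				await Promise.all(connections.map(conn => new Promise(resolve => conn.end(resolve))))
				if(cb) cb()
				connections = null
			}
		}, {
			get(target, prop) {
				// fall through to the first connection for unwrapped members
				// (assumes the pool has not been ended yet)
				return target[prop] || connections[0][prop]
			}
		})
	}

The get trap is what keeps unlisted mysql connection members usable without wrapping them one by one.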
@@ -45,7 +45,7 @@ module.exports = function (send, recive, dataDirectory, version, env)
 	let filesId = 1;
 
 	const events = new EventEmitter
-	let sphinx = pool();
+	let sphinx = await pool();
 
 	// initialize p2p
 	const p2p = new P2PServer(send)
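One detail these context lines don't show: await is only valid inside an async function, so the enclosing exported function must itself be async (the signature in the hunk header may simply predate the change). The implied shape, as a sketch rather than the verbatim source:

	// Implied enclosing shape (assumption — not verbatim source): the exported
	// function must itself be async for `await pool()` to be valid. EventEmitter,
	// P2PServer and pool() come from the surrounding module.
	module.exports = async function (send, recive, dataDirectory, version, env)
	{
		const events = new EventEmitter
		let sphinx = await pool();
		const p2p = new P2PServer(send)
		// ...
	}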
@@ -8,8 +8,8 @@ const forBigTable = require('../src/background/forBigTable')
 describe("big table for check", () => {
 	let sphinx;
 
-	it("init", function() {
-		sphinx = pool()
+	it("init", async function() {
+		sphinx = await pool()
 		expect(sphinx)
 	})
 
@@ -36,4 +36,8 @@ describe("big table for check", () => {
 		await forBigTable(sphinx, 'feed', record => records.push(record), null, 15)
 		expect(records.length === 13)
 	})
+
+	it("close", async function() {
+		await sphinx.end()
+	})
 });
@@ -1,4 +1,5 @@
 import {startApplication, stopApplication} from "../tests/application";
+global.logT = (...args) => {console.log(...args)}
 
 describe("application", () => {
 	before(startApplication);
@@ -1,4 +1,4 @@
-import { expect } from "chai";
+import { expect, assert } from "chai";
 
 const mysql = require('mysql')
 const config = require('../src/background/config')
@@ -60,15 +60,29 @@ describe("sphinx", () => {
 		})
 	})
 
 	it("query limit", function(done) {
-		const sphinx = pool()
-		let promises = []
-		sphinx.query(`delete from feed where id >= 0`, () => {
-			for(let i = 0; i < 500; i++)
-				promises.push(sphinx.query(`insert into feed(id, data) values(${i}, 'a')`))
-			Promise.all(promises).then(() => {
-				sphinx.query(`delete from feed where id >= 0`, () => done())
-			})
-		})
+		const test = async () => {
+			const sphinx = await pool()
+			let promises = []
+			sphinx.query(`delete from feed where id >= 0`, () => {
+				for(let i = 0; i < 500; i++)
+					promises.push(sphinx.query(`insert into feed(id, data) values(${i}, 'a')`))
+				Promise.all(promises).then(() => {
+					sphinx.query(`delete from feed where id >= 0`, async () => {
+						await sphinx.end()
+						done()
+					})
+				})
+			})
+		}
+		test()
 	})
+
+	it("escape", function () {
+		assert.equal(sphinx.escape(`naru'to`), `'naru\\'to'`)
+	})
+
+	it("close pool", function(done) {
+		sphinx.end(done)
+	})
 });
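One note on the new escape assertion: escape is not among the wrapped pool methods, so on the alternative pool it reaches a raw connection through the Proxy's get trap. A hypothetical usage of the roundRobinPool sketch above (makeMysqlConnection is assumed, not part of the repo):

	// Hypothetical usage of the roundRobinPool sketch above.
	const demo = async () => {
		const sphinx = await roundRobinPool(makeMysqlConnection, 3)
		sphinx.query('select 1')              // served by connection 0
		sphinx.query('select 2')              // served by connection 1 (round-robin)
		console.log(sphinx.escape(`naru'to`)) // not wrapped: falls through to connection 0
		await sphinx.end()                    // closes all three connections
	}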