dbgate/packages/engines/postgres/index.js
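// PostgreSQL engine driver for dbgate. Connections are created through the injected
// nativeModules.pg client; result streaming uses nativeModules.pgQueryStream.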

const _ = require('lodash');
const PostgreAnalyser = require('./PostgreAnalyser');
const PostgreDumper = require('./PostgreDumper');

/** @type {import('@dbgate/types').SqlDialect} */
const dialect = {
  rangeSelect: true,
  // escape character used when building SQL string literals
  stringEscapeChar: '\\',
  quoteIdentifier(s) {
    return '"' + s + '"';
  },
};

/** @type {import('@dbgate/types').EngineDriver} */
const driver = {
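  // Opens a connection using the injected pg client; falls back to the
  // default "postgres" database when no database name is given.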
  async connect(nativeModules, { server, port, user, password, database }) {
    const client = new nativeModules.pg.Client({
      host: server,
      port,
      user,
      password,
      database: database || 'postgres',
    });
    await client.connect();
    // keep the injected native modules on the client so stream() can create a query stream later
    client._nativeModules = nativeModules;
    return client;
  },
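  // Runs a single SQL statement and returns its rows together with the pg field metadata.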
  async query(client, sql) {
    const res = await client.query(sql);
    return { rows: res.rows, columns: res.fields };
  },
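  // Streams a query through pgQueryStream, reporting results via the options callbacks:
  // recordset() for column headers, row() for each row, info() for errors, done() at the end.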
  async stream(client, sql, options) {
    const query = new client._nativeModules.pgQueryStream(sql);
    const stream = client.query(query);
    // const handleInfo = (info) => {
    //   const { message, lineNumber, procName } = info;
    //   options.info({
    //     message,
    //     line: lineNumber,
    //     procedure: procName,
    //     time: new Date(),
    //     severity: 'info',
    //   });
    // };
    let wasHeader = false;
    const handleEnd = (result) => {
      // console.log('RESULT', result);
      options.done(result);
    };
    const handleReadable = () => {
      let row = stream.read();
      // emit the column list once, derived from the keys of the first row
      if (!wasHeader && row) {
        options.recordset(_.keys(row).map((columnName) => ({ columnName })));
        wasHeader = true;
      }
      // drain all rows currently buffered in the stream
      while (row) {
        options.row(row);
        row = stream.read();
      }
    };
    // const handleFields = (columns) => {
    //   // console.log('FIELDS', columns[0].name);
    //   options.recordset(columns);
    //   // options.recordset(extractColumns(columns));
    // };
    const handleError = (error) => {
      console.log('ERROR', error);
      const { message, lineNumber, procName } = error;
      options.info({
        message,
        line: lineNumber,
        procedure: procName,
        time: new Date(),
        severity: 'error',
      });
    };
    stream.on('error', handleError);
    stream.on('readable', handleReadable);
    // stream.on('result', handleRow)
    // stream.on('data', handleRow)
    stream.on('end', handleEnd);
    return stream;
  },
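  // Returns the server version string reported by SELECT version().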
  async getVersion(client) {
    const { rows } = await this.query(client, 'SELECT version()');
    const { version } = rows[0];
    return { version };
  },
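  // Analyses the complete database structure via PostgreAnalyser.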
  async analyseFull(pool) {
    const analyser = new PostgreAnalyser(pool, this);
    return analyser.fullAnalysis();
  },
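  // Refreshes a previously analysed structure, processing only incremental changes.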
  async analyseIncremental(pool, structure) {
    const analyser = new PostgreAnalyser(pool, this);
    return analyser.incrementalAnalysis(structure);
  },
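  // Creates a PostgreSQL-specific SQL dumper bound to this driver.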
  createDumper() {
    return new PostgreDumper(this);
  },
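  // Lists the non-template databases available on the connected server.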
  async listDatabases(client) {
    const { rows } = await this.query(client, 'SELECT datname AS name FROM pg_database WHERE datistemplate = false');
    return rows;
  },
  dialect,
  engine: 'postgres',
};

module.exports = driver;