postgre analyser

Jan Prochazka 2020-02-02 20:28:27 +01:00
parent c0b45a56d8
commit 8f34184818
5 changed files with 134 additions and 3 deletions

View File

@@ -37,7 +37,7 @@ class MySqlAnalyser extends DatabaseAnalayser {
     this.result.tables = tables.rows.map(table => ({
       ...table,
       columns: columns.rows
-        .filter(col => col.objectId == table.objectId)
+        .filter(col => col.pureName == table.pureName)
         .map(({ isNullable, extra, ...col }) => ({
           ...col,
           notNull: !isNullable,
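
The hunk above switches column-to-table matching from objectId to pureName. A minimal sketch of the name-based matching, with hypothetical rows (not part of the commit):

const tables = [{ pureName: 'users' }, { pureName: 'orders' }];
const columns = [
  { pureName: 'users', columnName: 'id' },
  { pureName: 'orders', columnName: 'total' },
];
// Each table collects exactly the columns whose pureName matches its own.
const result = tables.map(table => ({
  ...table,
  columns: columns.filter(col => col.pureName == table.pureName),
}));
// result[0] => { pureName: 'users', columns: [{ pureName: 'users', columnName: 'id' }] }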

View File

@@ -0,0 +1,51 @@
const fs = require('fs-extra');
const fp = require('lodash/fp');
const path = require('path');
const _ = require('lodash');
const DatabaseAnalayser = require('../default/DatabaseAnalyser');

/** @returns {Promise<string>} */
async function loadQuery(name) {
  return await fs.readFile(path.join(__dirname, name), 'utf-8');
}

class PostgreAnalyser extends DatabaseAnalayser {
  constructor(pool, driver) {
    super(pool, driver);
  }

  async createQuery(
    resFileName,
    tables = false,
    views = false,
    procedures = false,
    functions = false,
    triggers = false
  ) {
    let res = await loadQuery(resFileName);
    res = res.replace('=[OBJECT_ID_CONDITION]', ' is not null');
    return res;
  }

  async runAnalysis() {
    const tables = await this.driver.query(this.pool, await this.createQuery('table_modifications.psql'));
    const columns = await this.driver.query(this.pool, await this.createQuery('columns.psql'));
    // const pkColumns = await this.driver.query(this.pool, await this.createQuery('primary_keys.sql'));
    // const fkColumns = await this.driver.query(this.pool, await this.createQuery('foreign_keys.sql'));

    this.result.tables = tables.rows.map(table => ({
      ...table,
      columns: columns.rows
        .filter(col => col.pureName == table.pureName && col.schemaName == table.schemaName)
        .map(({ isNullable, ...col }) => ({
          ...col,
          notNull: isNullable == 'NO',
        })),
      foreignKeys: [],
      // primaryKey: extractPrimaryKeys(table, pkColumns.rows),
      // foreignKeys: extractForeignKeys(table, fkColumns.rows),
    }));
  }
}

module.exports = PostgreAnalyser;
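
createQuery() loads a .psql file and widens its "=[OBJECT_ID_CONDITION]" placeholder so that every object matches. A minimal sketch of that substitution (not part of the commit), using the predicate from columns.psql below:

// The .psql templates end a predicate with "=[OBJECT_ID_CONDITION]";
// replacing it with " is not null" makes the condition match all rows.
const template = "and 'table:' || table_schema || '.' || table_name =[OBJECT_ID_CONDITION]";
const sql = template.replace('=[OBJECT_ID_CONDITION]', ' is not null');
// sql => "and 'table:' || table_schema || '.' || table_name  is not null"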

View File

@@ -0,0 +1,17 @@
select
  table_schema as "schemaName",
  table_name as "pureName",
  column_name as "columnName",
  is_nullable as "isNullable",
  data_type as "dataType",
  character_maximum_length,
  numeric_precision,
  numeric_scale,
  column_default
from information_schema.columns
where
  table_schema <> 'information_schema'
  and table_schema <> 'pg_catalog'
  and table_schema !~ '^pg_toast'
  and 'table:' || table_schema || '.' || table_name =[OBJECT_ID_CONDITION]
order by ordinal_position
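
Two PostgreSQL details worth noting about this query: unquoted aliases are folded to lowercase, so the camelCase aliases must be double-quoted to survive into the result rows, and is_nullable arrives as the string 'YES' or 'NO', not a boolean. A tiny sketch with a hypothetical row:

// Hypothetical row shape as returned by columns.psql.
const row = { pureName: 'users', columnName: 'id', isNullable: 'NO', dataType: 'integer' };
const notNull = row.isNullable == 'NO'; // true
// Note: !row.isNullable would be false for both 'YES' and 'NO',
// since any non-empty string is truthy.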

View File

@@ -1,4 +1,6 @@
 const { Client } = require('pg');
+const PostgreAnalyser = require('./PostgreAnalyser');
+const PostgreDumper = require('./PostgreDumper');
 
 /** @type {import('dbgate').SqlDialect} */
 const dialect = {
@@ -8,7 +10,8 @@ const dialect = {
   },
 };
 
-module.exports = {
+/** @type {import('dbgate').EngineDriver} */
+const driver = {
   async connect({ server, port, user, password, database }) {
     const client = new Client({ host: server, port, user, password, database: database || 'postgres' });
     await client.connect();
@@ -16,16 +19,26 @@ module.exports = {
   },
   async query(client, sql) {
     const res = await client.query(sql);
-    return { rows: res.rows };
+    return { rows: res.rows, columns: res.fields };
   },
   async getVersion(client) {
     const { rows } = await this.query(client, 'SELECT version()');
     const { version } = rows[0];
     return { version };
   },
+  async analyseFull(pool) {
+    const analyser = new PostgreAnalyser(pool, this);
+    await analyser.runAnalysis();
+    return analyser.result;
+  },
+  createDumper() {
+    return new PostgreDumper(this);
+  },
   async listDatabases(client) {
     const { rows } = await this.query(client, 'SELECT datname AS name FROM pg_database WHERE datistemplate = false');
     return rows;
   },
   dialect,
 };
+
+module.exports = driver;
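
A hedged usage sketch of the reworked driver object; the require path and connection parameters are placeholders, not part of the commit:

const driver = require('./index'); // assumed module path

async function demo() {
  const client = await driver.connect({
    server: 'localhost',
    port: 5432,
    user: 'postgres',
    password: 'secret', // placeholder credentials
    database: 'postgres',
  });
  console.log(await driver.getVersion(client)); // { version: 'PostgreSQL ...' }
  console.log(await driver.listDatabases(client)); // [{ name: '...' }, ...]
  // analyseFull() accepts the connection; the analyser just forwards it to driver.query().
  const structure = await driver.analyseFull(client);
  console.log(structure.tables.map(t => t.schemaName + '.' + t.pureName));
}

demo();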

View File

@@ -0,0 +1,50 @@
with pkey as
(
  select cc.conrelid, format(E'create constraint %I primary key(%s);\n', cc.conname,
    string_agg(a.attname, ', '
      order by array_position(cc.conkey, a.attnum))) pkey
  from pg_catalog.pg_constraint cc
  join pg_catalog.pg_class c on c.oid = cc.conrelid
  join pg_catalog.pg_attribute a on a.attrelid = cc.conrelid
    and a.attnum = any(cc.conkey)
  where cc.contype = 'p'
  group by cc.conrelid, cc.conname
)

SELECT oid as "objectId", nspname as "schemaName", relname as "pureName",
  md5('CREATE TABLE ' || nspname || '.' || relname || E'\n(\n' ||
    array_to_string(
      array_agg(
        ' ' || column_name || ' ' || type || ' ' || not_null
      )
      , E',\n'
    ) || E'\n);\n' || (select pkey from pkey where pkey.conrelid = oid)) as "hash"

from
(
  SELECT
    c.relname, a.attname AS column_name, c.oid,
    n.nspname,
    pg_catalog.format_type(a.atttypid, a.atttypmod) as type,
    case
      when a.attnotnull
      then 'NOT NULL'
      else 'NULL'
    END as not_null
  FROM pg_class c,
    pg_namespace n,
    pg_attribute a,
    pg_type t
  WHERE c.relkind = 'r'
    AND a.attnum > 0
    AND a.attrelid = c.oid
    AND a.atttypid = t.oid
    AND n.oid = c.relnamespace
    AND n.nspname <> 'pg_catalog'
    AND n.nspname <> 'information_schema'
    AND n.nspname !~ '^pg_toast'
  ORDER BY a.attnum
) as tabledefinition

where 'table:' || nspname || '.' || relname =[OBJECT_ID_CONDITION]
group by relname, nspname, oid
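
To make the md5() input concrete: for a hypothetical table public.users with columns id and name and a primary key constraint users_pkey, the query hashes text shaped like

CREATE TABLE public.users
(
 id integer NOT NULL,
 name text NULL
);
create constraint users_pkey primary key(id);

(the "create constraint" line is only hash input produced by format(), not executable DDL). Any change to a column or to the primary key changes the "hash" value, which presumably lets the analyser detect modified tables without comparing full structures.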