From 8f34184818cc958cd8f76fc3b3f3295c599b1627 Mon Sep 17 00:00:00 2001
From: Jan Prochazka
Date: Sun, 2 Feb 2020 20:28:27 +0100
Subject: [PATCH] postgre analyser

---
 api/src/engines/mysql/MySqlAnalyser.js        |  2 +-
 api/src/engines/postgres/PostgreAnalyser.js   | 51 +++++++++++++++++++
 api/src/engines/postgres/columns.psql         | 17 +++++++
 api/src/engines/postgres/index.js             | 17 ++++++-
 .../engines/postgres/table_modifications.psql | 50 ++++++++++++++++++
 5 files changed, 134 insertions(+), 3 deletions(-)
 create mode 100644 api/src/engines/postgres/PostgreAnalyser.js
 create mode 100644 api/src/engines/postgres/columns.psql
 create mode 100644 api/src/engines/postgres/table_modifications.psql

diff --git a/api/src/engines/mysql/MySqlAnalyser.js b/api/src/engines/mysql/MySqlAnalyser.js
index 85df767f..17d2dc16 100644
--- a/api/src/engines/mysql/MySqlAnalyser.js
+++ b/api/src/engines/mysql/MySqlAnalyser.js
@@ -37,7 +37,7 @@ class MySqlAnalyser extends DatabaseAnalayser {
     this.result.tables = tables.rows.map(table => ({
       ...table,
       columns: columns.rows
-        .filter(col => col.objectId == table.objectId)
+        .filter(col => col.pureName == table.pureName)
         .map(({ isNullable, extra, ...col }) => ({
           ...col,
           notNull: !isNullable,
diff --git a/api/src/engines/postgres/PostgreAnalyser.js b/api/src/engines/postgres/PostgreAnalyser.js
new file mode 100644
index 00000000..126983af
--- /dev/null
+++ b/api/src/engines/postgres/PostgreAnalyser.js
@@ -0,0 +1,51 @@
+const fs = require('fs-extra');
+const fp = require('lodash/fp');
+const path = require('path');
+const _ = require('lodash');
+
+const DatabaseAnalayser = require('../default/DatabaseAnalyser');
+
+/** @returns {Promise} */
+async function loadQuery(name) {
+  return await fs.readFile(path.join(__dirname, name), 'utf-8');
+}
+
+class PostgreAnalyser extends DatabaseAnalayser {
+  constructor(pool, driver) {
+    super(pool, driver);
+  }
+
+  async createQuery(
+    resFileName,
+    tables = false,
+    views = false,
+    procedures = false,
+    functions = false,
+    triggers = false
+  ) {
+    let res = await loadQuery(resFileName);
+    res = res.replace('=[OBJECT_ID_CONDITION]', ' is not null');
+    return res;
+  }
+  async runAnalysis() {
+    const tables = await this.driver.query(this.pool, await this.createQuery('table_modifications.psql'));
+    const columns = await this.driver.query(this.pool, await this.createQuery('columns.psql'));
+    // const pkColumns = await this.driver.query(this.pool, await this.createQuery('primary_keys.sql'));
+    // const fkColumns = await this.driver.query(this.pool, await this.createQuery('foreign_keys.sql'));
+
+    this.result.tables = tables.rows.map(table => ({
+      ...table,
+      columns: columns.rows
+        .filter(col => col.pureName == table.pureName && col.schemaName == table.schemaName)
+        .map(({ isNullable, ...col }) => ({
+          ...col,
+          notNull: isNullable == 'NO',
+        })),
+      foreignKeys: [],
+      // primaryKey: extractPrimaryKeys(table, pkColumns.rows),
+      // foreignKeys: extractForeignKeys(table, fkColumns.rows),
+    }));
+  }
+}
+
+module.exports = PostgreAnalyser;
diff --git a/api/src/engines/postgres/columns.psql b/api/src/engines/postgres/columns.psql
new file mode 100644
index 00000000..e0a32b58
--- /dev/null
+++ b/api/src/engines/postgres/columns.psql
@@ -0,0 +1,17 @@
+select
+  table_schema as "schemaName",
+  table_name as "pureName",
+  column_name as "columnName",
+  is_nullable as "isNullable",
+  data_type as "dataType",
+  character_maximum_length,
+  numeric_precision,
+  numeric_scale,
+  column_default
+from information_schema.columns
+where
+  table_schema <> 'information_schema'
+  and table_schema <> 'pg_catalog'
+  and table_schema !~ '^pg_toast'
+  and 'table:' || table_schema || '.' || table_name =[OBJECT_ID_CONDITION]
+order by ordinal_position
diff --git a/api/src/engines/postgres/index.js b/api/src/engines/postgres/index.js
index 84fca42c..3f9016a8 100644
--- a/api/src/engines/postgres/index.js
+++ b/api/src/engines/postgres/index.js
@@ -1,4 +1,6 @@
 const { Client } = require('pg');
+const PostgreAnalyser = require('./PostgreAnalyser');
+const PostgreDumper = require('./PostgreDumper');
 
 /** @type {import('dbgate').SqlDialect} */
 const dialect = {
@@ -8,7 +10,8 @@
   },
 };
 
-module.exports = {
+/** @type {import('dbgate').EngineDriver} */
+const driver = {
   async connect({ server, port, user, password, database }) {
     const client = new Client({ host: server, port, user, password, database: database || 'postgres' });
     await client.connect();
@@ -16,16 +19,26 @@ module.exports = {
   },
   async query(client, sql) {
     const res = await client.query(sql);
-    return { rows: res.rows };
+    return { rows: res.rows, columns: res.fields };
   },
   async getVersion(client) {
     const { rows } = await this.query(client, 'SELECT version()');
     const { version } = rows[0];
     return { version };
   },
+  async analyseFull(pool) {
+    const analyser = new PostgreAnalyser(pool, this);
+    await analyser.runAnalysis();
+    return analyser.result;
+  },
+  createDumper() {
+    return new PostgreDumper(this);
+  },
   async listDatabases(client) {
     const { rows } = await this.query(client, 'SELECT datname AS name FROM pg_database WHERE datistemplate = false');
     return rows;
   },
   dialect,
 };
+
+module.exports = driver;
diff --git a/api/src/engines/postgres/table_modifications.psql b/api/src/engines/postgres/table_modifications.psql
new file mode 100644
index 00000000..8fee2d04
--- /dev/null
+++ b/api/src/engines/postgres/table_modifications.psql
@@ -0,0 +1,50 @@
+with pkey as
+(
+  select cc.conrelid, format(E'create constraint %I primary key(%s);\n', cc.conname,
+    string_agg(a.attname, ', '
+      order by array_position(cc.conkey, a.attnum))) pkey
+  from pg_catalog.pg_constraint cc
+    join pg_catalog.pg_class c on c.oid = cc.conrelid
+    join pg_catalog.pg_attribute a on a.attrelid = cc.conrelid
+      and a.attnum = any(cc.conkey)
+  where cc.contype = 'p'
+  group by cc.conrelid, cc.conname
+)
+
+
+SELECT oid as "objectId", nspname as "schemaName", relname as "pureName",
+  md5('CREATE TABLE ' || nspname || '.' || relname || E'\n(\n' ||
+    array_to_string(
+      array_agg(
+        ' ' || column_name || ' ' || type || ' '|| not_null
+      )
+      , E',\n'
+    ) || E'\n);\n' || (select pkey from pkey where pkey.conrelid = oid)) as "hash"
+from
+(
+  SELECT
+    c.relname, a.attname AS column_name, c.oid,
+    n.nspname,
+    pg_catalog.format_type(a.atttypid, a.atttypmod) as type,
+    case
+      when a.attnotnull
+      then 'NOT NULL'
+      else 'NULL'
+    END as not_null
+  FROM pg_class c,
+    pg_namespace n,
+    pg_attribute a,
+    pg_type t
+
+  WHERE c.relkind = 'r'
+    AND a.attnum > 0
+    AND a.attrelid = c.oid
+    AND a.atttypid = t.oid
+    AND n.oid = c.relnamespace
+    AND n.nspname <> 'pg_catalog'
+    AND n.nspname <> 'information_schema'
+    AND n.nspname !~ '^pg_toast'
+  ORDER BY a.attnum
+) as tabledefinition
+where 'table:' || nspname || '.' || relname =[OBJECT_ID_CONDITION]
+group by relname, nspname, oid
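
Note on the [OBJECT_ID_CONDITION] placeholder: both new .psql files end their filter with "... =[OBJECT_ID_CONDITION]", and PostgreAnalyser.createQuery() rewrites that marker before sending the query. A minimal sketch of the substitution, based on the replace() call in this patch; the IN-list variant at the end is a hypothetical illustration of a filtered analysis, not something this patch implements:

    // As implemented in createQuery(): the placeholder becomes an always-true
    // predicate, so every table is analysed.
    const sql = "... and 'table:' || table_schema || '.' || table_name =[OBJECT_ID_CONDITION]";
    const fullSql = sql.replace('=[OBJECT_ID_CONDITION]', ' is not null');

    // Hypothetical filtered variant (not part of this patch): substitute an
    // IN list of object ids instead, restricting the analysis to chosen tables.
    const filteredSql = sql.replace(
      '=[OBJECT_ID_CONDITION]',
      " in ('table:public.users', 'table:public.orders')"
    );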
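
For reviewers who want to exercise the new analyser end to end, a minimal usage sketch of the driver exported from api/src/engines/postgres/index.js; the require path, host, credentials and database name below are illustrative assumptions, not part of the patch:

    // Usage sketch: connect, run the full analysis, inspect the result.
    const driver = require('./api/src/engines/postgres');

    async function main() {
      // connect() resolves to a configured pg Client (assumed local dev credentials).
      const client = await driver.connect({
        server: 'localhost',
        port: 5432,
        user: 'postgres',
        password: 'postgres',
        database: 'mydb',
      });

      // analyseFull() instantiates PostgreAnalyser, runs table_modifications.psql
      // and columns.psql, and returns the analyser's result object.
      const structure = await driver.analyseFull(client);
      for (const table of structure.tables) {
        console.log(`${table.schemaName}.${table.pureName}: ${table.columns.length} columns`);
      }

      await client.end(); // close the pg connection
    }

    main().catch(console.error);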