mirror of
https://github.com/dbgate/dbgate
synced 2024-11-07 20:26:23 +00:00
deploy db test WIP
This commit is contained in:
parent
e653b793d8
commit
0974c76fc6
55
integration-tests/__tests__/deploy-database.spec.js
Normal file
55
integration-tests/__tests__/deploy-database.spec.js
Normal file
@ -0,0 +1,55 @@
|
|||||||
|
/// TODO
|
||||||
|
|
||||||
|
const stableStringify = require('json-stable-stringify');
|
||||||
|
const _ = require('lodash');
|
||||||
|
const fp = require('lodash/fp');
|
||||||
|
const uuidv1 = require('uuid/v1');
|
||||||
|
const { testWrapper } = require('../tools');
|
||||||
|
const engines = require('../engines');
|
||||||
|
const { getAlterDatabaseScript, extendDatabaseInfo, generateDbPairingId } = require('dbgate-tools');
|
||||||
|
|
||||||
|
// Builds a flat list of [engineLabel, objectType, object, engine] tuples —
// one row per tested object of every engine — suitable for test.each(...).
function flatSource() {
  const rows = [];
  for (const engine of engines) {
    for (const object of engine.objects || []) {
      rows.push([engine.label, object.type, object, engine]);
    }
  }
  return rows;
}
|
||||||
|
|
||||||
|
// Round-trip test helper for database diffing:
// 1. creates a small schema (t1, and t2 with a foreign key to t1),
// 2. snapshots the analysed structure, applies `mangle` to a deep copy,
// 3. generates the ALTER script between snapshot and mangled structure,
//    runs it against the database,
// 4. re-analyses and asserts the real table count matches the expectation.
// Params: conn/driver — engine connection pair from testWrapper;
//         mangle(structure) — mutates the structure copy in place;
//         createObject — optional extra DDL to run before the snapshot.
// Returns the re-analysed (real) database structure.
async function testDatabaseDiff(conn, driver, mangle, createObject = null) {
  await driver.query(conn, `create table t1 (id int not null primary key)`);

  await driver.query(
    conn,
    `create table t2 (
    id int not null primary key,
    t1_id int null references t1(id)
  )`
  );

  // Optional extra object (view/procedure/...) participating in the diff.
  if (createObject) await driver.query(conn, createObject);

  // Baseline: analysed structure with pairing ids so the differ can match
  // objects between the two versions.
  const structure1 = generateDbPairingId(extendDatabaseInfo(await driver.analyseFull(conn)));
  let structure2 = _.cloneDeep(structure1);
  mangle(structure2);
  // Re-extend after mutation so derived info stays consistent.
  structure2 = extendDatabaseInfo(structure2);

  const { sql } = getAlterDatabaseScript(structure1, structure2, {}, structure2, driver);
  console.log('RUNNING ALTER SQL', driver.engine, ':', sql);

  await driver.script(conn, sql);

  const structure2Real = extendDatabaseInfo(await driver.analyseFull(conn));

  // The altered database must contain exactly the tables the mangled
  // structure expects.
  expect(structure2Real.tables.length).toEqual(structure2.tables.length);
  return structure2Real;
}
|
||||||
|
|
||||||
|
describe('Deploy database', () => {
  // Run the diff scenario once per configured engine; %s is the engine label.
  test.each(engines.map(engine => [engine.label, engine]))(
    'Drop referenced table - %s',
    testWrapper(async (conn, driver, engine) => {
      // Removing t1 must also drop the foreign key from t2 for the ALTER
      // script to apply cleanly.
      await testDatabaseDiff(conn, driver, db => {
        _.remove(db.tables, x => x.pureName == 't1');
      });
    })
  );
});
|
@ -1,70 +1,30 @@
|
|||||||
const fs = require('fs-extra');
|
const fs = require('fs-extra');
|
||||||
const path = require('path');
|
const path = require('path');
|
||||||
const yaml = require('js-yaml');
|
const yaml = require('js-yaml');
|
||||||
const { tableInfoFromYaml, DatabaseAnalyser } = require('dbgate-tools');
|
const { databaseInfoFromYamlModel, DatabaseAnalyser } = require('dbgate-tools');
|
||||||
const { startsWith } = require('lodash');
|
const { startsWith } = require('lodash');
|
||||||
const { archivedir } = require('./directories');
|
const { archivedir } = require('./directories');
|
||||||
|
|
||||||
/**
 * Loads a serialized database model from a directory and converts it into
 * a DatabaseInfo structure via databaseInfoFromYamlModel.
 *
 * @param inputDir - directory path, or an `archive:NAME` reference resolved
 *                   inside the archive directory
 * @returns the converted database structure
 */
async function importDbModel(inputDir) {
  // Resolve `archive:NAME` references to their on-disk location.
  let dir = inputDir;
  if (inputDir.startsWith('archive:')) {
    dir = path.join(archivedir(), inputDir.substring('archive:'.length));
  }

  const files = [];
  for (const name of await fs.readdir(dir)) {
    // Only model files are considered: *.table.yaml and *.sql variants.
    if (!(name.endsWith('.table.yaml') || name.endsWith('.sql'))) continue;

    const text = await fs.readFile(path.join(dir, name), { encoding: 'utf-8' });
    files.push({
      name,
      text,
      // YAML files are parsed eagerly; SQL files keep raw text only.
      json: name.endsWith('.yaml') ? yaml.load(text) : null,
    });
  }

  return databaseInfoFromYamlModel(files);
}

module.exports = importDbModel;
|
||||||
|
@ -1,6 +1,7 @@
|
|||||||
import { ColumnInfo, TableInfo, ForeignKeyInfo } from 'dbgate-types';
|
import { ColumnInfo, TableInfo, ForeignKeyInfo, DatabaseInfo } from 'dbgate-types';
|
||||||
import _ from 'lodash';
|
|
||||||
import _cloneDeep from 'lodash/cloneDeep';
|
import _cloneDeep from 'lodash/cloneDeep';
|
||||||
|
import _compact from 'lodash/compact';
|
||||||
|
import { DatabaseAnalyser } from './DatabaseAnalyser';
|
||||||
|
|
||||||
export interface ColumnInfoYaml {
|
export interface ColumnInfoYaml {
|
||||||
name: string;
|
name: string;
|
||||||
@ -11,6 +12,11 @@ export interface ColumnInfoYaml {
|
|||||||
primaryKey?: boolean;
|
primaryKey?: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface DatabaseModelFile {
|
||||||
|
name: string;
|
||||||
|
text: string;
|
||||||
|
json: {};
|
||||||
|
}
|
||||||
export interface TableInfoYaml {
|
export interface TableInfoYaml {
|
||||||
name: string;
|
name: string;
|
||||||
// schema?: string;
|
// schema?: string;
|
||||||
@ -78,7 +84,11 @@ export function tableInfoToYaml(table: TableInfo): TableInfoYaml {
|
|||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
|
|
||||||
function convertForeignKeyFromYaml(col: ColumnInfoYaml, table: TableInfoYaml, allTables: TableInfoYaml[]): ForeignKeyInfo {
|
function convertForeignKeyFromYaml(
|
||||||
|
col: ColumnInfoYaml,
|
||||||
|
table: TableInfoYaml,
|
||||||
|
allTables: TableInfoYaml[]
|
||||||
|
): ForeignKeyInfo {
|
||||||
const refTable = allTables.find(x => x.name == col.references);
|
const refTable = allTables.find(x => x.name == col.references);
|
||||||
if (!refTable || !refTable.primaryKey) return null;
|
if (!refTable || !refTable.primaryKey) return null;
|
||||||
return {
|
return {
|
||||||
@ -98,7 +108,7 @@ export function tableInfoFromYaml(table: TableInfoYaml, allTables: TableInfoYaml
|
|||||||
const res: TableInfo = {
|
const res: TableInfo = {
|
||||||
pureName: table.name,
|
pureName: table.name,
|
||||||
columns: table.columns.map(c => columnInfoFromYaml(c, table)),
|
columns: table.columns.map(c => columnInfoFromYaml(c, table)),
|
||||||
foreignKeys: _.compact(
|
foreignKeys: _compact(
|
||||||
table.columns.filter(x => x.references).map(col => convertForeignKeyFromYaml(col, table, allTables))
|
table.columns.filter(x => x.references).map(col => convertForeignKeyFromYaml(col, table, allTables))
|
||||||
),
|
),
|
||||||
};
|
};
|
||||||
@ -111,3 +121,54 @@ export function tableInfoFromYaml(table: TableInfoYaml, allTables: TableInfoYaml
|
|||||||
}
|
}
|
||||||
return res;
|
return res;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function databaseInfoFromYamlModel(files: DatabaseModelFile[]): DatabaseInfo {
|
||||||
|
const model = DatabaseAnalyser.createEmptyStructure();
|
||||||
|
const tablesYaml = [];
|
||||||
|
|
||||||
|
for (const file of files) {
|
||||||
|
if (file.name.endsWith('.table.yaml') || file.name.endsWith('.sql')) {
|
||||||
|
if (file.name.endsWith('.table.yaml')) {
|
||||||
|
tablesYaml.push(file.json);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (file.name.endsWith('.view.sql')) {
|
||||||
|
model.views.push({
|
||||||
|
pureName: file.name.slice(0, -'.view.sql'.length),
|
||||||
|
createSql: file.text,
|
||||||
|
columns: [],
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (file.name.endsWith('.matview.sql')) {
|
||||||
|
model.matviews.push({
|
||||||
|
pureName: file.name.slice(0, -'.matview.sql'.length),
|
||||||
|
createSql: file.text,
|
||||||
|
columns: [],
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (file.name.endsWith('.proc.sql')) {
|
||||||
|
model.procedures.push({
|
||||||
|
pureName: file.name.slice(0, -'.proc.sql'.length),
|
||||||
|
createSql: file.text,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (file.name.endsWith('.func.sql')) {
|
||||||
|
model.functions.push({
|
||||||
|
pureName: file.name.slice(0, -'.func.sql'.length),
|
||||||
|
createSql: file.text,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (file.name.endsWith('.trigger.sql')) {
|
||||||
|
model.triggers.push({
|
||||||
|
pureName: file.name.slice(0, -'.trigger.sql'.length),
|
||||||
|
createSql: file.text,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return model;
|
||||||
|
}
|
||||||
|
Loading…
Reference in New Issue
Block a user