Mirror of https://github.com/dbgate/dbgate (synced 2024-11-07 20:26:23 +00:00)

Commit 5c703c786d: Merge branch 'master' of github.com:dbgate/dbgate
@@ -22,7 +22,7 @@ DbGate is licensed under MIT license and is completely free.
* MySQL
* PostgreSQL
* SQL Server
* Oracle (experimental)
* Oracle
* MongoDB
* Redis
* SQLite
@@ -5,9 +5,6 @@ function adjustFile(file) {
if (process.platform != 'win32') {
delete json.optionalDependencies.msnodesqlv8;
}
if (process.arch == 'arm64') {
delete json.optionalDependencies.oracledb;
}
fs.writeFileSync(file, JSON.stringify(json, null, 2), 'utf-8');
}

@@ -121,7 +121,6 @@
},
"optionalDependencies": {
"better-sqlite3": "9.6.0",
"msnodesqlv8": "^4.2.1",
"oracledb": "^5.5.0"
"msnodesqlv8": "^4.2.1"
}
}
@@ -1944,11 +1944,6 @@ open@^7.4.2:
    is-docker "^2.0.0"
    is-wsl "^2.1.1"

oracledb@^5.5.0:
  version "5.5.0"
  resolved "https://registry.yarnpkg.com/oracledb/-/oracledb-5.5.0.tgz#0cf9af5d0c0815f74849ae9ed56aee823514d71b"
  integrity sha512-i5cPvMENpZP8nnqptB6l0pjiOyySj1IISkbM4Hr3yZEDdANo2eezarwZb9NQ8fTh5pRjmgpZdSyIbnn9N3AENw==

os-tmpdir@~1.0.2:
  version "1.0.2"
  resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274"
@@ -5,9 +5,6 @@ let fillContent = '';
if (process.platform == 'win32') {
fillContent += `content.msnodesqlv8 = () => require('msnodesqlv8');`;
}
if (process.arch != 'arm64') {
fillContent += `content.oracledb = () => require('oracledb');`;
}
fillContent += `content['better-sqlite3'] = () => require('better-sqlite3');`;

const getContent = empty => `
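For context, the script above only accumulates the body of the generated native-modules shim; on a win32/x64 machine the resulting fillContent would expand to roughly the following lines (the module wrapper produced by getContent is not shown here):

    content.msnodesqlv8 = () => require('msnodesqlv8');
    content.oracledb = () => require('oracledb');
    content['better-sqlite3'] = () => require('better-sqlite3');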
@@ -1,6 +1,6 @@
{
"private": true,
"version": "5.2.10-beta.1",
"version": "5.2.10-beta.3",
"name": "dbgate-all",
"workspaces": [
"packages/*",
@@ -26,10 +26,10 @@
"compare-versions": "^3.6.0",
"cors": "^2.8.5",
"cross-env": "^6.0.3",
"dbgate-query-splitter": "^4.9.3",
"dbgate-datalib": "^5.0.0-alpha.1",
"dbgate-query-splitter": "^4.10.1",
"dbgate-sqltree": "^5.0.0-alpha.1",
"dbgate-tools": "^5.0.0-alpha.1",
"dbgate-datalib": "^5.0.0-alpha.1",
"debug": "^4.3.4",
"diff": "^5.0.0",
"diff2html": "^3.4.13",
@@ -60,7 +60,7 @@
"tar": "^6.0.5"
},
"scripts": {
"start": "env-cmd -f .env.local node src/index.js --listen-api",
"start": "env-cmd -f .env node src/index.js --listen-api",
"start:portal": "env-cmd -f env/portal/.env node src/index.js --listen-api",
"start:singledb": "env-cmd -f env/singledb/.env node src/index.js --listen-api",
"start:auth": "env-cmd -f env/auth/.env node src/index.js --listen-api",
@@ -83,7 +83,6 @@
},
"optionalDependencies": {
"better-sqlite3": "9.6.0",
"msnodesqlv8": "^4.2.1",
"oracledb": "^5.5.0"
"msnodesqlv8": "^4.2.1"
}
}
@@ -61,6 +61,7 @@ function getPortalCollections() {
useDatabaseUrl: !!process.env[`URL_${id}`],
databaseFile: process.env[`FILE_${id}`],
socketPath: process.env[`SOCKET_PATH_${id}`],
serviceName: process.env[`SERVICE_NAME_${id}`],
authType: process.env[`AUTH_TYPE_${id}`] || (process.env[`SOCKET_PATH_${id}`] ? 'socket' : undefined),
defaultDatabase:
process.env[`DATABASE_${id}`] ||
@@ -31,7 +31,7 @@
"typescript": "^4.4.3"
},
"dependencies": {
"dbgate-query-splitter": "^4.9.3",
"dbgate-query-splitter": "^4.10.1",
"dbgate-sqltree": "^5.0.0-alpha.1",
"debug": "^4.3.4",
"json-stable-stringify": "^1.0.1",
@@ -180,8 +180,15 @@ export class DatabaseAnalyser {
//   return this.createQueryCore('=OBJECT_ID_CONDITION', typeFields) != ' is not null';
// }

createQuery(template, typeFields) {
return this.createQueryCore(template, typeFields);
createQuery(template, typeFields, replacements = {}) {
return this.createQueryCore(this.processQueryReplacements(template, replacements), typeFields);
}

processQueryReplacements(query, replacements) {
for (const repl in replacements) {
query = query.replaceAll(repl, replacements[repl]);
}
return query;
}

createQueryCore(template, typeFields) {
@@ -302,8 +309,8 @@ export class DatabaseAnalyser {
return [..._compact(res), ...this.getDeletedObjects(snapshot)];
}

async analyserQuery(template, typeFields) {
const sql = this.createQuery(template, typeFields);
async analyserQuery(template, typeFields, replacements = {}) {
const sql = this.createQuery(template, typeFields, replacements);

if (!sql) {
return {
@@ -311,7 +318,9 @@ export class DatabaseAnalyser {
};
}
try {
return await this.driver.query(this.pool, sql);
const res = await this.driver.query(this.pool, sql);
this.logger.debug({ rows: res.rows.length, template }, `Loaded analyser query`);
return res;
} catch (err) {
logger.error({ err }, 'Error running analyser query');
return {
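A minimal sketch of what the new processQueryReplacements helper does: every key of the replacements object is substituted verbatim into the SQL template before the query runs. The 'HR' value below is a made-up example; the Oracle analyser later in this commit passes { $owner: this.pool._schema_name }.

    const template = `select table_name as "pure_name" from all_tables where OWNER = '$owner'`;
    const replacements = { $owner: 'HR' };

    let sql = template;
    for (const repl in replacements) {
      sql = sql.replaceAll(repl, replacements[repl]);
    }
    // sql => select table_name as "pure_name" from all_tables where OWNER = 'HR'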
@@ -199,14 +199,8 @@ export class SqlDumper implements AlterProcessor {

selectScopeIdentity(table: TableInfo) {}

columnDefinition(column: ColumnInfo, { includeDefault = true, includeNullable = true, includeCollate = true } = {}) {
if (column.computedExpression) {
this.put('^as %s', column.computedExpression);
if (column.isPersisted) this.put(' ^persisted');
return;
}

const type = column.dataType || this.dialect.fallbackDataType;
columnType(dataType: string) {
const type = dataType || this.dialect.fallbackDataType;
const typeWithValues = type.match(/([^(]+)(\(.+[^)]\))/);

if (typeWithValues?.length) {
@@ -217,6 +211,17 @@ export class SqlDumper implements AlterProcessor {
this.putRaw(SqlDumper.convertKeywordCase(type));
}

}

columnDefinition(column: ColumnInfo, { includeDefault = true, includeNullable = true, includeCollate = true } = {}) {
if (column.computedExpression) {
this.put('^as %s', column.computedExpression);
if (column.isPersisted) this.put(' ^persisted');
return;
}

this.columnType(column.dataType);

if (column.autoIncrement) {
this.autoIncrement();
}
@@ -56,24 +56,42 @@ export function createBulkInsertStreamBase(driver: EngineDriver, stream, pool, n
const rows = writable.buffer;
writable.buffer = [];

const dmp = driver.createDumper();
if (driver.dialect.allowMultipleValuesInsert) {
const dmp = driver.createDumper();
dmp.putRaw(`INSERT INTO ${fullNameQuoted} (`);
dmp.putCollection(',', writable.columnNames, col => dmp.putRaw(driver.dialect.quoteIdentifier(col as string)));
dmp.putRaw(')\n VALUES\n');

dmp.putRaw(`INSERT INTO ${fullNameQuoted} (`);
dmp.putCollection(',', writable.columnNames, col => dmp.putRaw(driver.dialect.quoteIdentifier(col as string)));
dmp.putRaw(')\n VALUES\n');
let wasRow = false;
for (const row of rows) {
if (wasRow) dmp.putRaw(',\n');
dmp.putRaw('(');
dmp.putCollection(',', writable.columnNames, col => dmp.putValue(row[col as string]));
dmp.putRaw(')');
wasRow = true;
}
dmp.putRaw(';');
// require('fs').writeFileSync('/home/jena/test.sql', dmp.s);
// console.log(dmp.s);
await driver.query(pool, dmp.s, { discardResult: true });
} else {
for (const row of rows) {
const dmp = driver.createDumper();
dmp.putRaw(`INSERT INTO ${fullNameQuoted} (`);
dmp.putCollection(',', writable.columnNames, col => dmp.putRaw(driver.dialect.quoteIdentifier(col as string)));
dmp.putRaw(')\n VALUES\n');

let wasRow = false;
for (const row of rows) {
if (wasRow) dmp.putRaw(',\n');
dmp.putRaw('(');
dmp.putCollection(',', writable.columnNames, col => dmp.putValue(row[col as string]));
dmp.putRaw(')');
wasRow = true;
dmp.putRaw('(');
dmp.putCollection(',', writable.columnNames, col => dmp.putValue(row[col as string]));
dmp.putRaw(')');
await driver.query(pool, dmp.s, { discardResult: true });
}
}
if (options.commitAfterInsert) {
const dmp = driver.createDumper();
dmp.commitTransaction();
await driver.query(pool, dmp.s, { discardResult: true });
}
dmp.putRaw(';');
// require('fs').writeFileSync('/home/jena/test.sql', dmp.s);
// console.log(dmp.s);
await driver.query(pool, dmp.s, { discardResult: true });
};

writable.sendIfFull = async () => {
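A rough sketch of the SQL shapes the rewritten send() produces (table name and values are invented). When dialect.allowMultipleValuesInsert is set (the MySQL dialect enables it later in this commit), all buffered rows go into one multi-row INSERT; in the new else branch one INSERT statement is issued per row, which is what drivers without multi-row VALUES support need.

    const columnNames = ['id', 'qty'];
    const rows = [{ id: 1, qty: 10 }, { id: 2, qty: 20 }];
    const quote = s => `"${s}"`; // stands in for driver.dialect.quoteIdentifier
    const multiRowSql =
      `INSERT INTO "demo" (${columnNames.map(quote).join(',')})\n VALUES\n` +
      rows.map(r => `(${columnNames.map(c => r[c]).join(',')})`).join(',\n') +
      ';';
    // INSERT INTO "demo" ("id","qty")
    //  VALUES
    // (1,10),
    // (2,20);
    // (in the real code each value goes through dmp.putValue, which handles SQL quoting/escaping)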
packages/types/dialect.d.ts (vendored)
@@ -12,6 +12,7 @@ export interface SqlDialect {
defaultSchemaName?: string;
enableConstraintsPerTable?: boolean;
requireStandaloneSelectForScopeIdentity?: boolean;
allowMultipleValuesInsert?: boolean;

dropColumnDependencies?: string[];
changeColumnDependencies?: string[];

packages/types/engines.d.ts (vendored)
@@ -24,6 +24,7 @@ export interface WriteTableOptions {
dropIfExists?: boolean;
truncate?: boolean;
createIfNotExists?: boolean;
commitAfterInsert?: boolean;
}

export interface EngineAuthType {
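Hypothetical usage of the new flag (only the option name comes from the hunk above): callers of writeTable can ask the bulk-insert writer to issue an explicit COMMIT once the buffered rows are flushed, which matters for drivers such as the Oracle one added below that do not auto-commit DML.

    const options = { createIfNotExists: true, commitAfterInsert: true }; // WriteTableOptions
    // passed through to createBulkInsertStreamBase(driver, stream, pool, name, options)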
@@ -24,7 +24,7 @@
"chartjs-adapter-moment": "^1.0.0",
"cross-env": "^7.0.3",
"dbgate-datalib": "^5.0.0-alpha.1",
"dbgate-query-splitter": "^4.9.3",
"dbgate-query-splitter": "^4.10.1",
"dbgate-sqltree": "^5.0.0-alpha.1",
"dbgate-tools": "^5.0.0-alpha.1",
"dbgate-types": "^5.0.0-alpha.1",
@@ -123,6 +123,10 @@
{/if}
{/if}

{#if driver?.showConnectionField('serviceName', $values)}
<FormTextField label="Service name" name="serviceName" disabled={isConnected} />
{/if}

{#if driver?.showConnectionField('socketPath', $values)}
<FormTextField
label="Socket path"
@@ -84,6 +84,7 @@
'defaultDatabase',
'singleDatabase',
'socketPath',
'serviceName',
];
const visibleProps = allProps.filter(x => driver?.showConnectionField(x, $values));
const omitProps = _.difference(allProps, visibleProps);
@@ -24,6 +24,9 @@ function getConnectionLabelCore(connection, { allowExplicitDatabase = true } = {
if (connection.singleDatabase && connection.defaultDatabase) {
return `${connection.defaultDatabase}`;
}
if (connection.useDatabaseUrl) {
return `${connection.databaseUrl}`;
}

return '';
}
@@ -32,7 +32,7 @@
},
"devDependencies": {
"dbgate-plugin-tools": "^1.0.7",
"dbgate-query-splitter": "^4.9.3",
"dbgate-query-splitter": "^4.10.1",
"webpack": "^5.91.0",
"webpack-cli": "^5.1.4",
"dbgate-tools": "^5.0.0-alpha.1",
@@ -32,7 +32,7 @@
},
"devDependencies": {
"dbgate-plugin-tools": "^1.0.7",
"dbgate-query-splitter": "^4.9.3",
"dbgate-query-splitter": "^4.10.1",
"webpack": "^5.91.0",
"webpack-cli": "^5.1.4",
"dbgate-tools": "^5.0.0-alpha.1",
@@ -33,7 +33,7 @@
"devDependencies": {
"antares-mysql-dumper": "^0.0.1",
"dbgate-plugin-tools": "^1.0.7",
"dbgate-query-splitter": "^4.9.3",
"dbgate-query-splitter": "^4.10.1",
"dbgate-tools": "^5.0.0-alpha.1",
"mysql2": "^3.9.7",
"webpack": "^5.91.0",
@@ -22,6 +22,7 @@ const dialect = {
enableConstraintsPerTable: false,
anonymousPrimaryKey: true,
explicitDropConstraint: true,
allowMultipleValuesInsert: true,
quoteIdentifier(s) {
return '`' + s + '`';
},
@@ -31,10 +31,13 @@
},
"devDependencies": {
"dbgate-plugin-tools": "^1.0.8",
"dbgate-query-splitter": "^4.9.0",
"dbgate-query-splitter": "^4.10.1",
"dbgate-tools": "^5.0.0-alpha.1",
"lodash": "^4.17.21",
"webpack": "^5.91.0",
"webpack-cli": "^5.1.4"
},
"dependencies": {
"oracledb": "^6.5.1"
}
}
@ -13,38 +13,18 @@ function normalizeTypeName(dataType) {
|
||||
|
||||
function getColumnInfo(
|
||||
{ is_nullable, column_name, data_type, char_max_length, numeric_precision, numeric_ccale, default_value },
|
||||
table = undefined,
|
||||
geometryColumns = undefined,
|
||||
geographyColumns = undefined
|
||||
table = undefined
|
||||
) {
|
||||
const normDataType = normalizeTypeName(data_type);
|
||||
let fullDataType = normDataType;
|
||||
if (char_max_length && isTypeString(normDataType)) fullDataType = `${normDataType}(${char_max_length})`;
|
||||
if (numeric_precision && numeric_ccale && isTypeNumeric(normDataType))
|
||||
fullDataType = `${normDataType}(${numeric_precision},${numeric_ccale})`;
|
||||
const autoIncrement = !!(default_value && default_value.startsWith('nextval('));
|
||||
if (
|
||||
table &&
|
||||
geometryColumns &&
|
||||
geometryColumns.rows.find(
|
||||
x => x.schema_name == table.schemaName && x.pure_name == table.pureName && x.column_name == column_name
|
||||
)
|
||||
) {
|
||||
fullDataType = 'geometry';
|
||||
}
|
||||
if (
|
||||
table &&
|
||||
geographyColumns &&
|
||||
geographyColumns.rows.find(
|
||||
x => x.schema_name == table.schemaName && x.pure_name == table.pureName && x.column_name == column_name
|
||||
)
|
||||
) {
|
||||
fullDataType = 'geography';
|
||||
}
|
||||
const autoIncrement = !!(default_value && default_value.endsWith('.nextval'));
|
||||
return {
|
||||
columnName: column_name,
|
||||
dataType: fullDataType,
|
||||
notNull: !is_nullable || is_nullable == 'NO' || is_nullable == 'no',
|
||||
notNull: is_nullable == 'N',
|
||||
defaultValue: autoIncrement ? undefined : default_value,
|
||||
autoIncrement,
|
||||
};
|
||||
@ -55,58 +35,50 @@ class Analyser extends DatabaseAnalyser {
|
||||
super(pool, driver, version);
|
||||
}
|
||||
|
||||
createQuery(resFileName, typeFields) {
|
||||
const query = super.createQuery(sql[resFileName], typeFields);
|
||||
createQuery(resFileName, typeFields, replacements = {}) {
|
||||
const query = super.createQuery(sql[resFileName], typeFields, replacements);
|
||||
//if (query) return query.replace('#REFTABLECOND#', this.driver.__analyserInternals.refTableCond);
|
||||
return query;
|
||||
}
|
||||
|
||||
async _computeSingleObjectId() {
|
||||
const { typeField, schemaName, pureName } = this.singleObjectFilter;
|
||||
this.singleObjectId = `${typeField}:${schemaName || 'public'}.${pureName}`;
|
||||
const { typeField, pureName } = this.singleObjectFilter;
|
||||
this.singleObjectId = `${typeField}:${pureName}`;
|
||||
}
|
||||
|
||||
async _runAnalysis() {
|
||||
this.feedback({ analysingMessage: 'Loading tables' });
|
||||
const tables = await this.analyserQuery(this.driver.dialect.stringAgg ? 'tableList' : 'tableList', ['tables']);
|
||||
const tables = await this.analyserQuery('tableList', ['tables'], { $owner: this.pool._schema_name });
|
||||
this.feedback({ analysingMessage: 'Loading columns' });
|
||||
const columns = await this.analyserQuery('columns', ['tables', 'views']);
|
||||
const columns = await this.analyserQuery('columns', ['tables', 'views'], { $owner: this.pool._schema_name });
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading primary keys' });
|
||||
const pkColumns = await this.analyserQuery('primaryKeys', ['tables']);
|
||||
const pkColumns = await this.analyserQuery('primaryKeys', ['tables'], { $owner: this.pool._schema_name });
|
||||
|
||||
//let fkColumns = null;
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading foreign keys' });
|
||||
const fkColumns = await this.analyserQuery('foreignKeys', ['tables']);
|
||||
const fkColumns = await this.analyserQuery('foreignKeys', ['tables'], { $owner: this.pool._schema_name });
|
||||
this.feedback({ analysingMessage: 'Loading views' });
|
||||
const views = await this.analyserQuery('views', ['views']);
|
||||
let geometryColumns = { rows: [] };
|
||||
let geographyColumns = { rows: [] };
|
||||
const views = await this.analyserQuery('views', ['views'], { $owner: this.pool._schema_name });
|
||||
|
||||
this.feedback({ analysingMessage: 'Loading materialized views' });
|
||||
const matviews = this.driver.dialect.materializedViews ? await this.analyserQuery('matviews', ['matviews']) : null;
|
||||
this.feedback({ analysingMessage: 'Loading materialized view columns' });
|
||||
const matviewColumns = this.driver.dialect.materializedViews
|
||||
? await this.analyserQuery('matviewColumns', ['matviews'])
|
||||
const matviews = this.driver.dialect.materializedViews
|
||||
? await this.analyserQuery('matviews', ['matviews'], { $owner: this.pool._schema_name })
|
||||
: null;
|
||||
this.feedback({ analysingMessage: 'Loading routines' });
|
||||
const routines = await this.analyserQuery('routines', ['procedures', 'functions']);
|
||||
const routines = await this.analyserQuery('routines', ['procedures', 'functions'], {
|
||||
$owner: this.pool._schema_name,
|
||||
});
|
||||
this.feedback({ analysingMessage: 'Loading indexes' });
|
||||
const indexes = this.driver.__analyserInternals.skipIndexes
|
||||
? { rows: [] }
|
||||
: await this.analyserQuery('indexes', ['tables']);
|
||||
this.feedback({ analysingMessage: 'Loading index columns' });
|
||||
// const indexcols = this.driver.__analyserInternals.skipIndexes
|
||||
// ? { rows: [] }
|
||||
// : await this.driver.query(this.pool, this.createQuery('indexcols', ['tables']));
|
||||
const indexes = await this.analyserQuery('indexes', ['tables'], { $owner: this.pool._schema_name });
|
||||
this.feedback({ analysingMessage: 'Loading unique names' });
|
||||
const uniqueNames = await this.analyserQuery('uniqueNames', ['tables']);
|
||||
const uniqueNames = await this.analyserQuery('uniqueNames', ['tables'], { $owner: this.pool._schema_name });
|
||||
this.feedback({ analysingMessage: 'Finalizing DB structure' });
|
||||
|
||||
const fkColumnsMapped = fkColumns.rows.map(x => ({
|
||||
pureName: x.pure_name,
|
||||
schemaName: x.schema_name,
|
||||
// schemaName: x.schema_name,
|
||||
constraintSchema: x.constraint_schema,
|
||||
constraintName: x.constraint_name,
|
||||
columnName: x.column_name,
|
||||
@ -114,11 +86,11 @@ class Analyser extends DatabaseAnalyser {
|
||||
updateAction: x.update_action,
|
||||
deleteAction: x.delete_action,
|
||||
refTableName: x.ref_table_name,
|
||||
refSchemaName: x.ref_schema_name,
|
||||
// refSchemaName: x.ref_schema_name,
|
||||
}));
|
||||
const pkColumnsMapped = pkColumns.rows.map(x => ({
|
||||
pureName: x.pure_name,
|
||||
schemaName: x.schema_name,
|
||||
// schemaName: x.schema_name,
|
||||
constraintSchema: x.constraint_schema,
|
||||
constraintName: x.constraint_name,
|
||||
columnName: x.column_name,
|
||||
@ -131,21 +103,20 @@ class Analyser extends DatabaseAnalyser {
|
||||
tables: tables.rows.map(table => {
|
||||
const newTable = {
|
||||
pureName: table.pure_name,
|
||||
schemaName: table.schema_name,
|
||||
objectId: `tables:${table.schema_name}.${table.pure_name}`,
|
||||
// schemaName: table.schema_name,
|
||||
objectId: `tables:${table.pure_name}`,
|
||||
contentHash: table.hash_code_columns ? `${table.hash_code_columns}-${table.hash_code_constraints}` : null,
|
||||
};
|
||||
return {
|
||||
...newTable,
|
||||
columns: (columnsGrouped[columnGroup(table)] || []).map(col =>
|
||||
getColumnInfo(col, newTable, geometryColumns, geographyColumns)
|
||||
),
|
||||
columns: (columnsGrouped[columnGroup(table)] || []).map(col => getColumnInfo(col, newTable)),
|
||||
primaryKey: DatabaseAnalyser.extractPrimaryKeys(newTable, pkColumnsMapped),
|
||||
foreignKeys: DatabaseAnalyser.extractForeignKeys(newTable, fkColumnsMapped),
|
||||
indexes: _.uniqBy(
|
||||
indexes.rows.filter(
|
||||
idx =>
|
||||
idx.tableName == table.pureName && !uniqueNames.rows.find(x => x.constraintName == idx.constraintName)
|
||||
idx.tableName == newTable.pureName &&
|
||||
!uniqueNames.rows.find(x => x.constraintName == idx.constraintName)
|
||||
),
|
||||
'constraintName'
|
||||
).map(idx => ({
|
||||
@ -155,12 +126,13 @@ class Analyser extends DatabaseAnalyser {
|
||||
.filter(col => col.tableName == idx.tableName && col.constraintName == idx.constraintName)
|
||||
.map(col => ({
|
||||
..._.pick(col, ['columnName']),
|
||||
isDescending: col.descending == 'DESC',
|
||||
})),
|
||||
})),
|
||||
uniques: _.uniqBy(
|
||||
indexes.rows.filter(
|
||||
idx =>
|
||||
idx.tableName == table.pureName && uniqueNames.rows.find(x => x.constraintName == idx.constraintName)
|
||||
idx.tableName == newTable.pureName && uniqueNames.rows.find(x => x.constraintName == idx.constraintName)
|
||||
),
|
||||
'constraintName'
|
||||
).map(idx => ({
|
||||
@ -174,108 +146,47 @@ class Analyser extends DatabaseAnalyser {
|
||||
};
|
||||
}),
|
||||
views: views.rows.map(view => ({
|
||||
objectId: `views:${view.schema_name}.${view.pure_name}`,
|
||||
objectId: `views:${view.pure_name}`,
|
||||
pureName: view.pure_name,
|
||||
schemaName: view.schema_name,
|
||||
// schemaName: view.schema_name,
|
||||
contentHash: view.hash_code,
|
||||
createSql: `CREATE VIEW "${view.schema_name}"."${view.pure_name}"\nAS\n${view.create_sql}`,
|
||||
createSql: `CREATE VIEW "${view.pure_name}"\nAS\n${view.create_sql}`,
|
||||
columns: (columnsGrouped[columnGroup(view)] || []).map(col => getColumnInfo(col)),
|
||||
})),
|
||||
matviews: matviews
|
||||
? matviews.rows.map(matview => ({
|
||||
objectId: `matviews:${matview.schema_name}.${matview.pure_name}`,
|
||||
objectId: `matviews:${matview.pure_name}`,
|
||||
pureName: matview.pure_name,
|
||||
schemaName: matview.schema_name,
|
||||
// schemaName: matview.schema_name,
|
||||
contentHash: matview.hash_code,
|
||||
createSql: `CREATE MATERIALIZED VIEW "${matview.schema_name}"."${matview.pure_name}"\nAS\n${matview.definition}`,
|
||||
columns: matviewColumns.rows
|
||||
.filter(col => col.pure_name == matview.pure_name && col.schema_name == matview.schema_name)
|
||||
.map(col => getColumnInfo(col)),
|
||||
createSql: `CREATE MATERIALIZED VIEW "${matview.pure_name}"\nAS\n${matview.definition}`,
|
||||
columns: (columnsGrouped[columnGroup(view)] || []).map(col => getColumnInfo(col)),
|
||||
}))
|
||||
: undefined,
|
||||
procedures: routines.rows
|
||||
.filter(x => x.object_type == 'PROCEDURE')
|
||||
.map(proc => ({
|
||||
objectId: `procedures:${proc.schema_name}.${proc.pure_name}`,
|
||||
objectId: `procedures:${proc.pure_name}`,
|
||||
pureName: proc.pure_name,
|
||||
schemaName: proc.schema_name,
|
||||
createSql: `CREATE PROCEDURE "${proc.schema_name}"."${proc.pure_name}"() LANGUAGE ${proc.language}\nAS\n$$\n${proc.definition}\n$$`,
|
||||
// schemaName: proc.schema_name,
|
||||
createSql: `CREATE PROCEDURE "${proc.pure_name}"() LANGUAGE ${proc.language}\nAS\n$$\n${proc.definition}\n$$`,
|
||||
contentHash: proc.hash_code,
|
||||
})),
|
||||
functions: routines.rows
|
||||
.filter(x => x.object_type == 'FUNCTION')
|
||||
.map(func => ({
|
||||
objectId: `functions:${func.schema_name}.${func.pure_name}`,
|
||||
createSql: `CREATE FUNCTION "${func.schema_name}"."${func.pure_name}"() RETURNS ${func.data_type} LANGUAGE ${func.language}\nAS\n$$\n${func.definition}\n$$`,
|
||||
objectId: `functions:${func.pure_name}`,
|
||||
createSql: `CREATE FUNCTION "${func.pure_name}"() RETURNS ${func.data_type} LANGUAGE ${func.language}\nAS\n$$\n${func.definition}\n$$`,
|
||||
pureName: func.pure_name,
|
||||
schemaName: func.schema_name,
|
||||
// schemaName: func.schema_name,
|
||||
contentHash: func.hash_code,
|
||||
})),
|
||||
};
|
||||
|
||||
// this.feedback({ analysingMessage: 'Debug sleep' });
|
||||
// await new Promise(resolve => setTimeout(resolve, 90 * 1000));
|
||||
|
||||
this.feedback({ analysingMessage: null });
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
async _getFastSnapshot() {
|
||||
return null;
|
||||
|
||||
const tableModificationsQueryData = this.driver.dialect.stringAgg
|
||||
? await this.analyserQuery('tableModifications')
|
||||
: null;
|
||||
const viewModificationsQueryData = await this.analyserQuery('viewModifications');
|
||||
const matviewModificationsQueryData = this.driver.dialect.materializedViews
|
||||
? await this.analyserQuery('matviewModifications')
|
||||
: null;
|
||||
const routineModificationsQueryData = await this.analyserQuery('routineModifications');
|
||||
|
||||
return {
|
||||
tables: tableModificationsQueryData
|
||||
? tableModificationsQueryData.rows.map(x => ({
|
||||
objectId: `tables:${x.schema_name}.${x.pure_name}`,
|
||||
pureName: x.pure_name,
|
||||
schemaName: x.schema_name,
|
||||
contentHash: `${x.hash_code_columns}-${x.hash_code_constraints}`,
|
||||
}))
|
||||
: null,
|
||||
views: viewModificationsQueryData
|
||||
? viewModificationsQueryData.rows.map(x => ({
|
||||
objectId: `views:${x.schema_name}.${x.pure_name}`,
|
||||
pureName: x.pure_name,
|
||||
schemaName: x.schema_name,
|
||||
contentHash: x.hash_code,
|
||||
}))
|
||||
: undefined,
|
||||
matviews: matviewModificationsQueryData
|
||||
? matviewModificationsQueryData.rows.map(x => ({
|
||||
objectId: `matviews:${x.schema_name}.${x.pure_name}`,
|
||||
pureName: x.pure_name,
|
||||
schemaName: x.schema_name,
|
||||
contentHash: x.hash_code,
|
||||
}))
|
||||
: undefined,
|
||||
procedures: routineModificationsQueryData.rows
|
||||
.filter(x => x.object_type == 'PROCEDURE')
|
||||
.map(x => ({
|
||||
objectId: `procedures:${x.schema_name}.${x.pure_name}`,
|
||||
pureName: x.pure_name,
|
||||
schemaName: x.schema_name,
|
||||
contentHash: x.hash_code,
|
||||
})),
|
||||
functions: routineModificationsQueryData.rows
|
||||
.filter(x => x.object_type == 'FUNCTION')
|
||||
.map(x => ({
|
||||
objectId: `functions:${x.schema_name}.${x.pure_name}`,
|
||||
pureName: x.pure_name,
|
||||
schemaName: x.schema_name,
|
||||
contentHash: x.hash_code,
|
||||
})),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Analyser;
|
||||
|
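Two schema-related changes in the Oracle analyser above are worth spelling out. First, every analyser query now receives { $owner: this.pool._schema_name }, and the SQL templates below contain a literal '$owner' placeholder, so all metadata queries are scoped to the schema selected at connect time. Second, objectId values no longer embed a schema (tables:NAME instead of tables:SCHEMA.NAME), which is why the templates now compare 'tables:' || TABLE_NAME against OBJECT_ID_CONDITION. A small illustration with an invented table name:

    const pureName = 'CUSTOMERS';
    const objectId = `tables:${pureName}`; // what the analyser stores per table
    // During a single-object refresh, =OBJECT_ID_CONDITION expands to a condition on such ids,
    // e.g. " in ('tables:CUSTOMERS')"; during a full analysis it expands to " is not null".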
@@ -0,0 +1,38 @@
const { createBulkInsertStreamBase } = require('dbgate-tools');
const _ = require('lodash');

/**
 *
 * @param {import('dbgate-types').EngineDriver} driver
 */
function createOracleBulkInsertStream(driver, stream, pool, name, options) {
  const fullNameQuoted = name.schemaName
    ? `${driver.dialect.quoteIdentifier(name.schemaName)}.${driver.dialect.quoteIdentifier(name.pureName)}`
    : driver.dialect.quoteIdentifier(name.pureName);

  const writable = createBulkInsertStreamBase(driver, stream, pool, name, {
    ...options,
    // this is really not used, send method below is used instead
    commitAfterInsert: true,
  });

  writable.send = async () => {
    const dmp = driver.createDumper();
    dmp.putRaw(`INSERT INTO ${fullNameQuoted} (`);
    dmp.putCollection(',', writable.columnNames, col => dmp.putRaw(driver.dialect.quoteIdentifier(col)));
    dmp.putRaw(')\n VALUES (\n');
    dmp.put(
      '%,s',
      writable.columnNames.map((c, i) => `:C${i}`)
    );
    dmp.putRaw(')');

    const rows = writable.buffer.map(row => _.mapKeys(row, (v, k) => `c${writable.columnNames.indexOf(k)}`));
    await pool.executeMany(dmp.s, rows, { autoCommit: true });
    writable.buffer = [];
  };

  return writable;
}

module.exports = createOracleBulkInsertStream;
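The writer above differs from the generic bulk-insert stream: instead of dumping literal values, it prepares a single parameterized INSERT with positional bind placeholders and hands the whole buffer to node-oracledb's executeMany with autoCommit enabled. Illustratively, for columns A and B on a table T it builds a statement shaped like

    INSERT INTO "T" ("A","B")
     VALUES (
    :C0,:C1)

and each buffered row is remapped (via _.mapKeys) to an object keyed c0, c1, ... before being passed as the binds array to pool.executeMany(dmp.s, rows, { autoCommit: true }).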
@ -4,19 +4,9 @@ const stream = require('stream');
|
||||
const driverBases = require('../frontend/drivers');
|
||||
const Analyser = require('./Analyser');
|
||||
//--const pg = require('pg');
|
||||
//const oracledb = require('oracledb');
|
||||
const oracledb = require('oracledb');
|
||||
const { createBulkInsertStreamBase, makeUniqueColumnNames } = require('dbgate-tools');
|
||||
|
||||
|
||||
let requireOracledb; // native module
|
||||
|
||||
let oracledbValue;
|
||||
function getOracledb() {
|
||||
if (!oracledbValue) {
|
||||
oracledbValue = requireOracledb();
|
||||
}
|
||||
return oracledbValue;
|
||||
}
|
||||
const createOracleBulkInsertStream = require('./createOracleBulkInsertStream');
|
||||
|
||||
/*
|
||||
pg.types.setTypeParser(1082, 'text', val => val); // date
|
||||
@ -57,23 +47,31 @@ const drivers = driverBases.map(driverBase => ({
|
||||
database,
|
||||
databaseUrl,
|
||||
useDatabaseUrl,
|
||||
serviceName,
|
||||
ssl,
|
||||
isReadOnly,
|
||||
authType,
|
||||
socketPath,
|
||||
}) {
|
||||
client = await getOracledb().getConnection({
|
||||
client = await oracledb.getConnection({
|
||||
user,
|
||||
password,
|
||||
connectString: useDatabaseUrl ? databaseUrl : port ? `${server}:${port}` : server,
|
||||
connectString: useDatabaseUrl ? databaseUrl : port ? `${server}:${port}/${serviceName}` : server,
|
||||
});
|
||||
if (database) {
|
||||
await client.execute(`ALTER SESSION SET CURRENT_SCHEMA = ${database}`);
|
||||
}
|
||||
client._schema_name = database;
|
||||
return client;
|
||||
},
|
||||
async close(pool) {
|
||||
return pool.end();
|
||||
},
|
||||
async query(client, sql) {
|
||||
//console.log('query sql', sql);
|
||||
if (sql.trim() == 'COMMIT;') {
|
||||
sql = 'COMMIT';
|
||||
}
|
||||
|
||||
if (sql == null) {
|
||||
return {
|
||||
rows: [],
|
||||
@ -101,112 +99,155 @@ const drivers = driverBases.map(driverBase => ({
|
||||
});
|
||||
*/
|
||||
// console.log('queryStream', sql);
|
||||
const query = client.queryStream(sql);
|
||||
// const consumeStream = new Promise((resolve, reject) => {
|
||||
let rowcount = 0;
|
||||
let wasHeader = false;
|
||||
|
||||
query.on('metadata', row => {
|
||||
// console.log('metadata', row);
|
||||
if (!wasHeader) {
|
||||
columns = extractOracleColumns(row);
|
||||
if (columns && columns.length > 0) {
|
||||
options.recordset(columns);
|
||||
if (sql.trim().toLowerCase().startsWith('select')) {
|
||||
const query = client.queryStream(sql);
|
||||
// const consumeStream = new Promise((resolve, reject) => {
|
||||
let rowcount = 0;
|
||||
let wasHeader = false;
|
||||
|
||||
query.on('metadata', row => {
|
||||
// console.log('metadata', row);
|
||||
if (!wasHeader) {
|
||||
columns = extractOracleColumns(row);
|
||||
if (columns && columns.length > 0) {
|
||||
options.recordset(columns);
|
||||
}
|
||||
wasHeader = true;
|
||||
}
|
||||
wasHeader = true;
|
||||
}
|
||||
|
||||
// options.row(zipDataRow(row, columns));
|
||||
});
|
||||
|
||||
query.on('data', row => {
|
||||
// console.log('stream DATA');
|
||||
if (!wasHeader) {
|
||||
columns = extractOracleColumns(row);
|
||||
if (columns && columns.length > 0) {
|
||||
options.recordset(columns);
|
||||
}
|
||||
wasHeader = true;
|
||||
}
|
||||
options.row(zipDataRow(row, columns));
|
||||
});
|
||||
|
||||
query.on('end', () => {
|
||||
const { command, rowCount } = query._result || {};
|
||||
|
||||
if (command != 'SELECT' && _.isNumber(rowCount)) {
|
||||
options.info({
|
||||
message: `${rowCount} rows affected`,
|
||||
time: new Date(),
|
||||
severity: 'info',
|
||||
});
|
||||
}
|
||||
|
||||
if (!wasHeader) {
|
||||
columns = extractOracleColumns(query._result);
|
||||
if (columns && columns.length > 0) {
|
||||
options.recordset(columns);
|
||||
}
|
||||
wasHeader = true;
|
||||
}
|
||||
|
||||
options.done();
|
||||
});
|
||||
|
||||
query.on('error', error => {
|
||||
console.log('ERROR', error);
|
||||
const { message, lineNumber, procName } = error;
|
||||
options.info({
|
||||
message,
|
||||
line: lineNumber,
|
||||
procedure: procName,
|
||||
time: new Date(),
|
||||
severity: 'error',
|
||||
// options.row(zipDataRow(row, columns));
|
||||
});
|
||||
options.done();
|
||||
});
|
||||
query.on('close', function () {
|
||||
//console.log("stream 'close' event");
|
||||
// The underlying ResultSet has been closed, so the connection can now
|
||||
// be closed, if desired. Note: do not close connections on 'end'.
|
||||
//resolve(rowcount);
|
||||
});
|
||||
//});
|
||||
|
||||
query.on('data', row => {
|
||||
// console.log('stream DATA');
|
||||
if (!wasHeader) {
|
||||
columns = extractOracleColumns(row);
|
||||
if (columns && columns.length > 0) {
|
||||
options.recordset(columns);
|
||||
}
|
||||
wasHeader = true;
|
||||
}
|
||||
options.row(zipDataRow(row, columns));
|
||||
});
|
||||
|
||||
query.on('end', () => {
|
||||
const { command, rowCount } = query._result || {};
|
||||
|
||||
if (command != 'SELECT' && _.isNumber(rowCount)) {
|
||||
options.info({
|
||||
message: `${rowCount} rows affected`,
|
||||
time: new Date(),
|
||||
severity: 'info',
|
||||
});
|
||||
}
|
||||
|
||||
if (!wasHeader) {
|
||||
columns = extractOracleColumns(query._result);
|
||||
if (columns && columns.length > 0) {
|
||||
options.recordset(columns);
|
||||
}
|
||||
wasHeader = true;
|
||||
}
|
||||
|
||||
options.done();
|
||||
});
|
||||
|
||||
query.on('error', error => {
|
||||
console.log('ERROR', error);
|
||||
const { message, lineNumber, procName } = error;
|
||||
options.info({
|
||||
message,
|
||||
line: lineNumber,
|
||||
procedure: procName,
|
||||
time: new Date(),
|
||||
severity: 'error',
|
||||
});
|
||||
options.done();
|
||||
});
|
||||
query.on('close', function () {
|
||||
//console.log("stream 'close' event");
|
||||
// The underlying ResultSet has been closed, so the connection can now
|
||||
// be closed, if desired. Note: do not close connections on 'end'.
|
||||
//resolve(rowcount);
|
||||
});
|
||||
//});
|
||||
} else {
|
||||
client.execute(sql, (err, res) => {
|
||||
if (err) {
|
||||
console.log('Error query', err, sql);
|
||||
options.info({
|
||||
message: err.message,
|
||||
time: new Date(),
|
||||
severity: 'error',
|
||||
});
|
||||
} else {
|
||||
const { rowsAffected, metaData, rows } = res || {};
|
||||
|
||||
if (rows && metaData) {
|
||||
const columns = extractOracleColumns(metaData);
|
||||
options.recordset(columns);
|
||||
for (const row of rows) {
|
||||
options.row(zipDataRow(row, columns));
|
||||
}
|
||||
} else if (rowsAffected) {
|
||||
options.info({
|
||||
message: `${rowsAffected} rows affected`,
|
||||
time: new Date(),
|
||||
severity: 'info',
|
||||
});
|
||||
}
|
||||
}
|
||||
options.done();
|
||||
});
|
||||
}
|
||||
//const numrows = await consumeStream;
|
||||
//console.log('Rows selected: ' + numrows);
|
||||
//client.query(query);
|
||||
},
|
||||
async getVersion(client) {
|
||||
//const { rows } = await this.query(client, "SELECT banner as version FROM v$version WHERE banner LIKE 'Oracle%'");
|
||||
const { rows } = await this.query(client, 'SELECT version as "version" FROM v$instance');
|
||||
const { version } = rows[0];
|
||||
|
||||
const isCockroach = false; //version.toLowerCase().includes('cockroachdb');
|
||||
const isRedshift = false; // version.toLowerCase().includes('redshift');
|
||||
const isOracle = true;
|
||||
|
||||
const m = version.match(/([\d\.]+)/);
|
||||
//console.log('M', m);
|
||||
let versionText = null;
|
||||
let versionMajor = null;
|
||||
let versionMinor = null;
|
||||
if (m) {
|
||||
if (isOracle) versionText = `Oracle ${m[1]}`;
|
||||
const numbers = m[1].split('.');
|
||||
if (numbers[0]) versionMajor = parseInt(numbers[0]);
|
||||
if (numbers[1]) versionMinor = parseInt(numbers[1]);
|
||||
async getVersionCore(client) {
|
||||
try {
|
||||
const { rows } = await this.query(
|
||||
client,
|
||||
"SELECT product || ' ' || version_full as \"version\" FROM product_component_version WHERE product LIKE 'Oracle%Database%'"
|
||||
);
|
||||
return rows[0].version.replace(' ', ' ');
|
||||
} catch (e) {
|
||||
const { rows } = await this.query(client, 'SELECT banner as "version" FROM v$version');
|
||||
return rows[0].version;
|
||||
}
|
||||
},
|
||||
async getVersion(client) {
|
||||
try {
|
||||
//const { rows } = await this.query(client, "SELECT banner as version FROM v$version WHERE banner LIKE 'Oracle%'");
|
||||
// const { rows } = await this.query(client, 'SELECT version as "version" FROM v$instance');
|
||||
const version = await this.getVersionCore(client);
|
||||
|
||||
return {
|
||||
version,
|
||||
versionText,
|
||||
isOracle,
|
||||
isCockroach,
|
||||
isRedshift,
|
||||
versionMajor,
|
||||
versionMinor,
|
||||
};
|
||||
const m = version.match(/(\d+[a-z])\s+(\w+).*(\d+)\.(\d+)/);
|
||||
//console.log('M', m);
|
||||
let versionText = null;
|
||||
let versionMajor = null;
|
||||
let versionMinor = null;
|
||||
if (m) {
|
||||
versionText = `Oracle ${m[1]} ${m[2]}`;
|
||||
if (m[3]) versionMajor = parseInt(m[3]);
|
||||
if (m[4]) versionMinor = parseInt(m[4]);
|
||||
}
|
||||
|
||||
return {
|
||||
version,
|
||||
versionText,
|
||||
versionMajor,
|
||||
versionMinor,
|
||||
};
|
||||
} catch (e) {
|
||||
return {
|
||||
version: '???',
|
||||
versionText: 'Oracle ???',
|
||||
versionMajor: null,
|
||||
versionMinor: null,
|
||||
};
|
||||
}
|
||||
},
|
||||
async readQuery(client, sql, structure) {
|
||||
/*
|
||||
@ -261,11 +302,10 @@ const drivers = driverBases.map(driverBase => ({
|
||||
return pass;
|
||||
},
|
||||
async writeTable(pool, name, options) {
|
||||
// @ts-ignore
|
||||
return createBulkInsertStreamBase(this, stream, pool, name, options);
|
||||
return createOracleBulkInsertStream(this, stream, pool, name, options);
|
||||
},
|
||||
async listDatabases(client) {
|
||||
const { rows } = await this.query(client, 'SELECT instance_name AS "name" FROM v$instance');
|
||||
const { rows } = await this.query(client, 'SELECT username as "name" from all_users order by username');
|
||||
return rows;
|
||||
},
|
||||
|
||||
@ -283,10 +323,4 @@ const drivers = driverBases.map(driverBase => ({
|
||||
},
|
||||
}));
|
||||
|
||||
drivers.initialize = dbgateEnv => {
|
||||
if (dbgateEnv.nativeModules && dbgateEnv.nativeModules.oracledb) {
|
||||
requireOracledb = dbgateEnv.nativeModules.oracledb;
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = drivers;
|
||||
|
@@ -3,7 +3,4 @@ const drivers = require('./drivers');
module.exports = {
packageName: 'dbgate-plugin-oracle',
drivers,
initialize(dbgateEnv) {
drivers.initialize(dbgateEnv);
},
};
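Taken together with the package.json change above (oracledb moved into the plugin's regular dependencies) and the backend switch from the lazy getOracledb() helper to a plain require, the plugin no longer needs the nativeModules hand-off from the host application, so the initialize pass-through removed here had nothing left to do:

    const oracledb = require('oracledb'); // new loading path in the backend driver
    // previously: dbgateEnv.nativeModules.oracledb was injected via drivers.initialize(dbgateEnv)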
@@ -1,6 +1,6 @@
module.exports = `
select
owner as "schema_name",
-- owner as "schema_name",
table_name as "pure_name",
column_name as "column_name",
nullable as "is_nullable",
@@ -10,6 +10,6 @@ select
data_scale as "numeric_scale",
data_default as "default_value"
FROM all_tab_columns av
where TABLE_NAME =OBJECT_ID_CONDITION
where OWNER='$owner' AND 'tables:' || TABLE_NAME =OBJECT_ID_CONDITION
order by column_id
`;
@@ -1,2 +0,0 @@
module.exports = `
`;
@@ -1,2 +0,0 @@
module.exports = `
`;
@@ -1,2 +0,0 @@
module.exports = `
`;
@@ -1,16 +1,16 @@
module.exports = `
select fk.constraint_name as "constraint_name",
fk.owner as "constraint_schema",
-- fk.owner as "constraint_schema",
fk.table_name as "pure_name",
fk.owner as "schema_name",
-- fk.owner as "schema_name",
fk.delete_rule as "update_action",
fk.delete_rule as "delete_action",
ref.table_name as "ref_table_name",
ref.owner as "ref_schema_name",
-- ref.owner as "ref_schema_name",
basecol.column_name as "column_name",
refcol.column_name as "ref_column_name"
from all_cons_columns refcol, all_cons_columns basecol, all_constraints ref, all_constraints fk
where fk.constraint_type = 'R'
where fk.OWNER = '$owner' AND fk.constraint_type = 'R'
and ref.owner = fk.r_owner
and ref.constraint_name = fk.r_constraint_name
and basecol.owner = fk.owner
@@ -19,6 +19,6 @@ and basecol.table_name = fk.table_name
and refcol.owner = ref.owner
and refcol.constraint_name = ref.constraint_name
and refcol.table_name = ref.table_name
AND fk.constraint_name =OBJECT_ID_CONDITION
AND 'tables:' || fk.table_name =OBJECT_ID_CONDITION
order by basecol.position
`;
@@ -1,2 +0,0 @@
module.exports = `
`;
@@ -1,2 +0,0 @@
module.exports = `
`;
@@ -1,15 +1,10 @@
const columns = require('./columns');
const tableModifications = require('./tableList');
const tableList = require('./tableList');
const viewModifications = require('./views');
const matviewModifications = require('./matviews');
const primaryKeys = require('./primaryKeys');
const foreignKeys = require('./foreignKeys');
const views = require('./views');
const matviews = require('./matviews');
const routines = require('./routines');
const routineModifications = require('./routines');
const matviewColumns = require('./matviewColumns');
const indexes = require('./indexes'); // use mysql
//const indexcols = require('./indexcols');
const uniqueNames = require('./uniqueNames');
@@ -22,17 +17,12 @@ const uniqueNames = require('./uniqueNames');

module.exports = {
columns,
tableModifications,
tableList,
viewModifications,
primaryKeys,
foreignKeys,
views,
routines,
routineModifications,
matviews,
matviewModifications,
matviewColumns,
indexes,
// indexcols,
uniqueNames,
@@ -1,2 +0,0 @@
module.exports = `
`;
@@ -1,16 +1,15 @@
module.exports = `
select i.table_name as "tableName",
i.table_owner as "schemaName",
-- i.table_owner as "schemaName",
i.index_name as "constraintName",
i.index_type as "indexType",
i.uniqueness as "Unique",
ic.column_name as "columnName",
ic.column_position as "postion",
ic.descend as "descending"
from all_ind_columns ic, all_indexes i
where ic.index_owner = i.owner
where INDEX_OWNER = '$owner' AND ic.index_owner = i.owner
and ic.index_name = i.index_name
and i.index_name =OBJECT_ID_CONDITION
and 'tables:' || i.table_name =OBJECT_ID_CONDITION
order by i.table_owner,
i.table_name,
i.index_name,
@@ -1,9 +0,0 @@
module.exports = `
SELECT owner "schema_name"
, table_name "pure_name"
, column_name "column_name"
, data_type "data_type"
FROM all_tab_columns av
where table_name =OBJECT_ID_CONDITION
order by column_id
`;
@@ -1,2 +0,0 @@
module.exports = `
`;
@@ -1,5 +1,5 @@
module.exports = `
SELECT owner as schema_name,
SELECT -- owner as schema_name,
mview_name pure_name,
container_name,
'' || trim(
@@ -14,6 +14,6 @@ SELECT owner as schema_name,
'//text()'
)) definition
FROM all_mviews
where mview_name=OBJECT_ID_CONDITION
where OWNER = '$owner' AND 'matviews:' || mview_name=OBJECT_ID_CONDITION
order by owner, mview_name
`;
@@ -1,8 +1,8 @@
module.exports = `
select
pk.owner as "constraint_schema",
-- pk.owner as "constraint_schema",
pk.constraint_name as "constraint_name",
pk.owner as "schema_name",
-- pk.owner as "schema_name",
pk.table_name as "pure_name",
basecol.column_name as "column_name"
from all_cons_columns basecol,
@@ -11,6 +11,7 @@ where constraint_type = 'P'
and basecol.owner = pk.owner
and basecol.constraint_name = pk.constraint_name
and basecol.table_name = pk.table_name
and pk.constraint_name =OBJECT_ID_CONDITION
and 'tables:' || basecol.table_name =OBJECT_ID_CONDITION
and pk.owner = '$owner'
order by basecol.position
`;
@@ -1,2 +0,0 @@
module.exports = `
`;
@@ -1,7 +1,7 @@
module.exports = `
select
routine_name as "pure_name",
routine_schema as "schema_name",
-- routine_schema as "schema_name",
routine_definition as "definition",
standard_hash(routine_definition, 'MD5') as "hash_code",
routine_type as "object_type",
@@ -32,6 +32,7 @@ from (select
all_procedures ap,
all_objects ao
where
ap.owner = '$owner' and
ap.owner = ao.owner and
ap.object_name = ao.object_name and
ao.object_type in ('PACKAGE', 'PROCEDURE', 'FUNCTION')
@@ -1,9 +1,9 @@
module.exports = `
select
owner "schema_name",
table_name "pure_name"
-- owner "schema_name",
table_name "pure_name"
from
all_tables
where TABLE_NAME =OBJECT_ID_CONDITION
where OWNER='$owner' AND 'tables:' || TABLE_NAME =OBJECT_ID_CONDITION
`;

@@ -1,2 +0,0 @@
module.exports = `
`;
@@ -1,6 +1,6 @@
module.exports = `
select constraint_name
select constraint_name as "constraintName"
from all_constraints
where constraint_type = 'U'
and constraint_name =OBJECT_ID_CONDITION
where owner='$owner' and constraint_type = 'U'
and 'tables:' || table_name =OBJECT_ID_CONDITION
`;
@@ -1,2 +0,0 @@
module.exports = `
`;
@@ -3,10 +3,10 @@ select avv.*,
ora_hash("create_sql") as "hash_code"
from (select
view_name as "pure_name",
owner as "schema_name",
-- owner as "schema_name",
SUBSTR(text_vc, 1, 3900) AS "create_sql"
from all_views av
where text_vc is not null
where owner = '$owner' and text_vc is not null
) avv
where "pure_name" =OBJECT_ID_CONDITION
where 'views:' || "pure_name" =OBJECT_ID_CONDITION
`;
@ -1,100 +1,123 @@
|
||||
const { SqlDumper, arrayToHexString, testEqualTypes } = global.DBGATE_TOOLS;
|
||||
|
||||
class Dumper extends SqlDumper {
|
||||
/** @param type {import('dbgate-types').TransformType} */
|
||||
transform(type, dumpExpr) {
|
||||
switch (type) {
|
||||
case 'GROUP:YEAR':
|
||||
case 'YEAR':
|
||||
this.put('^extract(^year ^from %c)', dumpExpr);
|
||||
break;
|
||||
case 'MONTH':
|
||||
this.put('^extract(^month ^from %c)', dumpExpr);
|
||||
break;
|
||||
case 'DAY':
|
||||
this.put('^extract(^day ^from %c)', dumpExpr);
|
||||
break;
|
||||
case 'GROUP:MONTH':
|
||||
this.put("^to_char(%c, '%s')", dumpExpr, 'YYYY-MM');
|
||||
break;
|
||||
case 'GROUP:DAY':
|
||||
this.put("^to_char(%c, '%s')", dumpExpr, 'YYYY-MM-DD');
|
||||
break;
|
||||
default:
|
||||
dumpExpr();
|
||||
break;
|
||||
}
|
||||
createDatabase(name) {
|
||||
this.putCmd(
|
||||
`CREATE USER c##${name}
|
||||
IDENTIFIED BY ${name}
|
||||
DEFAULT TABLESPACE users
|
||||
TEMPORARY TABLESPACE temp
|
||||
QUOTA 10M ON users;`,
|
||||
name
|
||||
);
|
||||
}
|
||||
|
||||
dropRecreatedTempTable(tmptable) {
|
||||
this.putCmd('^drop ^table %i ^cascade', tmptable);
|
||||
}
|
||||
|
||||
renameTable(obj, newname) {
|
||||
this.putCmd('^alter ^table %f ^rename ^to %i', obj, newname);
|
||||
}
|
||||
|
||||
renameColumn(column, newcol) {
|
||||
this.putCmd('^alter ^table %f ^rename ^column %i ^to %i', column, column.columnName, newcol);
|
||||
}
|
||||
|
||||
dropTable(obj, options = {}) {
|
||||
this.put('^drop ^table');
|
||||
if (options.testIfExists) this.put(' ^if ^exists');
|
||||
this.put(' %f', obj);
|
||||
this.endCommand();
|
||||
}
|
||||
|
||||
//public override void CreateIndex(IndexInfo ix)
|
||||
//{
|
||||
//}
|
||||
|
||||
enableConstraints(table, enabled) {
|
||||
this.putCmd('^alter ^table %f %k ^trigger ^all', table, enabled ? 'enable' : 'disable');
|
||||
}
|
||||
// oracle uses implicit transactions
|
||||
beginTransaction() {}
|
||||
|
||||
columnDefinition(col, options) {
|
||||
if (col.autoIncrement) {
|
||||
this.put('^serial');
|
||||
super.columnType(col.dataType);
|
||||
this.put(' ^generated ^by ^default ^on ^null ^as ^identity');
|
||||
return;
|
||||
}
|
||||
super.columnDefinition(col, options);
|
||||
}
|
||||
|
||||
changeColumn(oldcol, newcol, constraints) {
|
||||
if (oldcol.columnName != newcol.columnName) {
|
||||
this.putCmd('^alter ^table %f ^rename ^column %i ^to %i', oldcol, oldcol.columnName, newcol.columnName);
|
||||
}
|
||||
if (!testEqualTypes(oldcol, newcol)) {
|
||||
this.putCmd('^alter ^table %f ^alter ^column %i ^type %s', oldcol, newcol.columnName, newcol.dataType);
|
||||
}
|
||||
if (oldcol.notNull != newcol.notNull) {
|
||||
if (newcol.notNull) this.putCmd('^alter ^table %f ^alter ^column %i ^set ^not ^null', newcol, newcol.columnName);
|
||||
else this.putCmd('^alter ^table %f ^alter ^column %i ^drop ^not ^null', newcol, newcol.columnName);
|
||||
}
|
||||
if (oldcol.defaultValue != newcol.defaultValue) {
|
||||
if (newcol.defaultValue == null) {
|
||||
this.putCmd('^alter ^table %f ^alter ^column %i ^drop ^default', newcol, newcol.columnName);
|
||||
} else {
|
||||
this.putCmd(
|
||||
'^alter ^table %f ^alter ^column %i ^set ^default %s',
|
||||
newcol,
|
||||
newcol.columnName,
|
||||
newcol.defaultValue
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
// /** @param type {import('dbgate-types').TransformType} */
|
||||
// transform(type, dumpExpr) {
|
||||
// switch (type) {
|
||||
// case 'GROUP:YEAR':
|
||||
// case 'YEAR':
|
||||
// this.put('^extract(^year ^from %c)', dumpExpr);
|
||||
// break;
|
||||
// case 'MONTH':
|
||||
// this.put('^extract(^month ^from %c)', dumpExpr);
|
||||
// break;
|
||||
// case 'DAY':
|
||||
// this.put('^extract(^day ^from %c)', dumpExpr);
|
||||
// break;
|
||||
// case 'GROUP:MONTH':
|
||||
// this.put("^to_char(%c, '%s')", dumpExpr, 'YYYY-MM');
|
||||
// break;
|
||||
// case 'GROUP:DAY':
|
||||
// this.put("^to_char(%c, '%s')", dumpExpr, 'YYYY-MM-DD');
|
||||
// break;
|
||||
// default:
|
||||
// dumpExpr();
|
||||
// break;
|
||||
// }
|
||||
// }
|
||||
|
||||
putValue(value) {
|
||||
if (value === true) this.putRaw('true');
|
||||
else if (value === false) this.putRaw('false');
|
||||
else super.putValue(value);
|
||||
}
|
||||
// dropRecreatedTempTable(tmptable) {
|
||||
// this.putCmd('^drop ^table %i ^cascade', tmptable);
|
||||
// }
|
||||
|
||||
putByteArrayValue(value) {
|
||||
this.putRaw(`e'\\\\x${arrayToHexString(value)}'`);
|
||||
}
|
||||
// renameTable(obj, newname) {
|
||||
// this.putCmd('^alter ^table %f ^rename ^to %i', obj, newname);
|
||||
// }
|
||||
|
||||
// renameColumn(column, newcol) {
|
||||
// this.putCmd('^alter ^table %f ^rename ^column %i ^to %i', column, column.columnName, newcol);
|
||||
// }
|
||||
|
||||
// dropTable(obj, options = {}) {
|
||||
// this.put('^drop ^table');
|
||||
// if (options.testIfExists) this.put(' ^if ^exists');
|
||||
// this.put(' %f', obj);
|
||||
// this.endCommand();
|
||||
// }
|
||||
|
||||
// //public override void CreateIndex(IndexInfo ix)
|
||||
// //{
|
||||
// //}
|
||||
|
||||
// enableConstraints(table, enabled) {
|
||||
// this.putCmd('^alter ^table %f %k ^trigger ^all', table, enabled ? 'enable' : 'disable');
|
||||
// }
|
||||
|
||||
// columnDefinition(col, options) {
|
||||
// if (col.autoIncrement) {
|
||||
// this.put('^serial');
|
||||
// return;
|
||||
// }
|
||||
// super.columnDefinition(col, options);
|
||||
// }
|
||||
|
||||
// changeColumn(oldcol, newcol, constraints) {
|
||||
// if (oldcol.columnName != newcol.columnName) {
|
||||
// this.putCmd('^alter ^table %f ^rename ^column %i ^to %i', oldcol, oldcol.columnName, newcol.columnName);
|
||||
// }
|
||||
// if (!testEqualTypes(oldcol, newcol)) {
|
||||
// this.putCmd('^alter ^table %f ^alter ^column %i ^type %s', oldcol, newcol.columnName, newcol.dataType);
|
||||
// }
|
||||
// if (oldcol.notNull != newcol.notNull) {
|
||||
// if (newcol.notNull) this.putCmd('^alter ^table %f ^alter ^column %i ^set ^not ^null', newcol, newcol.columnName);
|
||||
// else this.putCmd('^alter ^table %f ^alter ^column %i ^drop ^not ^null', newcol, newcol.columnName);
|
||||
// }
|
||||
// if (oldcol.defaultValue != newcol.defaultValue) {
|
||||
// if (newcol.defaultValue == null) {
|
||||
// this.putCmd('^alter ^table %f ^alter ^column %i ^drop ^default', newcol, newcol.columnName);
|
||||
// } else {
|
||||
// this.putCmd(
|
||||
// '^alter ^table %f ^alter ^column %i ^set ^default %s',
|
||||
// newcol,
|
||||
// newcol.columnName,
|
||||
// newcol.defaultValue
|
||||
// );
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
// putValue(value) {
|
||||
// if (value === true) this.putRaw('true');
|
||||
// else if (value === false) this.putRaw('false');
|
||||
// else super.putValue(value);
|
||||
// }
|
||||
|
||||
// putByteArrayValue(value) {
|
||||
// this.putRaw(`e'\\\\x${arrayToHexString(value)}'`);
|
||||
// }
|
||||
}
|
||||
|
||||
module.exports = Dumper;
|
||||
|
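One design note on the Dumper above: createDatabase maps a DbGate "database" onto an Oracle common user (schema), which is consistent with listDatabases in the backend driver now reading from all_users. Illustratively, createDatabase('sales') emits DDL along these lines:

    CREATE USER c##sales
      IDENTIFIED BY sales
      DEFAULT TABLESPACE users
      TEMPORARY TABLESPACE temp
      QUOTA 10M ON users;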
@ -6,7 +6,7 @@ const spatialTypes = ['GEOGRAPHY'];
|
||||
|
||||
/** @type {import('dbgate-types').SqlDialect} */
|
||||
const dialect = {
|
||||
rangeSelect: false,
|
||||
rangeSelect: true,
|
||||
limitSelect: false,
|
||||
offsetFetchRangeSyntax: true,
|
||||
ilike: true,
|
||||
@ -19,7 +19,6 @@ const dialect = {
|
||||
quoteIdentifier(s) {
|
||||
return '"' + s + '"';
|
||||
},
|
||||
stringAgg: true,
|
||||
|
||||
createColumn: true,
|
||||
dropColumn: true,
|
||||
@ -38,49 +37,30 @@ const dialect = {
|
||||
dropReferencesWhenDropTable: true,
|
||||
|
||||
predefinedDataTypes: [
|
||||
'bigint',
|
||||
'bigserial',
|
||||
'bit',
|
||||
'varbit',
|
||||
'boolean',
|
||||
'box',
|
||||
'bytea',
|
||||
'char(20)',
|
||||
'varchar(250)',
|
||||
'cidr',
|
||||
'circle',
|
||||
'date',
|
||||
'double precision',
|
||||
'inet',
|
||||
'int',
|
||||
'interval',
|
||||
'json',
|
||||
'jsonb',
|
||||
'line',
|
||||
'lseg',
|
||||
'macaddr',
|
||||
'macaddr8',
|
||||
'money',
|
||||
'numeric(10,2)',
|
||||
'path',
|
||||
'pg_lsn',
|
||||
'pg_snapshot',
|
||||
'point',
|
||||
'polygon',
|
||||
'real',
|
||||
'smallint',
|
||||
'smallserial',
|
||||
'serial',
|
||||
'text',
|
||||
'time',
|
||||
'timetz',
|
||||
'timestamp',
|
||||
'timestamptz',
|
||||
'tsquery',
|
||||
'tsvector',
|
||||
'txid_snapshot',
|
||||
'uuid',
|
||||
'xml',
|
||||
'VARCHAR2',
|
||||
'NUMBER',
|
||||
'DATE',
|
||||
'CLOB',
|
||||
'BLOB',
|
||||
'INTEGER',
|
||||
|
||||
'BFILE',
|
||||
'BINARY_DOUBLE',
|
||||
'BINARY_FLOAT',
|
||||
'CHAR',
|
||||
'FLOAT',
|
||||
'INTERVAL DAY',
|
||||
'INTERVAL YEAR',
|
||||
'LONG',
|
||||
'LONG RAW',
|
||||
'NCHAR',
|
||||
'NCLOB',
|
||||
'NVARCHAR2',
|
||||
'RAW',
|
||||
'ROWID',
|
||||
'TIMESTAMP',
|
||||
'UROWID',
|
||||
// 'XMLTYPE',
|
||||
],
|
||||
|
||||
createColumnViewExpression(columnName, dataType, source, alias) {
|
||||
@ -110,32 +90,15 @@ const oracleDriverBase = {
|
||||
getQuerySplitterOptions: () => oracleSplitterOptions,
|
||||
readOnlySessions: true,
|
||||
|
||||
databaseUrlPlaceholder: 'e.g. oracledb://user:password@localhost:1521',
|
||||
databaseUrlPlaceholder: 'e.g. localhost:1521/orcl',
|
||||
|
||||
showConnectionField: (field, values) => {
|
||||
if (field == 'useDatabaseUrl') return true;
|
||||
if (values.useDatabaseUrl) {
|
||||
return ['databaseUrl', 'isReadOnly'].includes(field);
|
||||
return ['databaseUrl', 'user', 'password'].includes(field);
|
||||
}
|
||||
|
||||
return ['user', 'password', 'defaultDatabase', 'singleDatabase', 'isReadOnly', 'server', 'port'].includes(field);
|
||||
},
|
||||
|
||||
beforeConnectionSave: connection => {
|
||||
const { databaseUrl } = connection;
|
||||
if (databaseUrl) {
|
||||
const m = databaseUrl.match(/\/([^/]+)($|\?)/);
|
||||
return {
|
||||
...connection,
|
||||
singleDatabase: !!m,
|
||||
defaultDatabase: m ? m[1] : null,
|
||||
};
|
||||
}
|
||||
return connection;
|
||||
},
|
||||
|
||||
__analyserInternals: {
|
||||
refTableCond: '',
|
||||
return ['user', 'password', 'server', 'port', 'serviceName'].includes(field);
|
||||
},
|
||||
|
||||
getNewObjectTemplates() {
|
||||
@ -168,7 +131,7 @@ $$ LANGUAGE plpgsql;`,
const oracleDriver = {
  ...oracleDriverBase,
  engine: 'oracle@dbgate-plugin-oracle',
  title: 'OracleDB (Experimental)',
  title: 'OracleDB',
  defaultPort: 1521,
  dialect: {
    ...dialect,

@ -189,7 +152,7 @@ const oracleDriver = {
    return dialect;
  },

  showConnectionTab: (field) => field == 'sshTunnel',
  showConnectionTab: field => field == 'sshTunnel',
};

module.exports = [oracleDriver];

@ -22,7 +22,7 @@ var config = {
  plugins: [
    new webpack.IgnorePlugin({
      checkResource(resource) {
        const lazyImports = ['oracledb', 'uws'];
        const lazyImports = ['uws'];
        if (!lazyImports.includes(resource)) {
          return false;
        }

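A minimal standalone sketch of the IgnorePlugin pattern shown above; the surrounding config shape is assumed, only the webpack 5 plugin API itself is standard:

const webpack = require('webpack');

module.exports = {
  plugins: [
    new webpack.IgnorePlugin({
      // Returning true tells webpack not to bundle the matching module;
      // the application must provide it (or tolerate its absence) at runtime.
      checkResource(resource) {
        return ['uws'].includes(resource);
      },
    }),
  ],
};
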
@ -31,7 +31,7 @@
  },
  "devDependencies": {
    "dbgate-plugin-tools": "^1.0.7",
    "dbgate-query-splitter": "^4.9.3",
    "dbgate-query-splitter": "^4.10.1",
    "dbgate-tools": "^5.0.0-alpha.1",
    "lodash": "^4.17.21",
    "pg": "^8.11.5",

@ -70,29 +70,23 @@ class Analyser extends DatabaseAnalyser {
    const tables = await this.analyserQuery(this.driver.dialect.stringAgg ? 'tableModifications' : 'tableList', [
      'tables',
    ]);
    this.logger.debug({ count: tables.rows.length }, 'Tables loaded');

    this.feedback({ analysingMessage: 'Loading columns' });
    const columns = await this.analyserQuery('columns', ['tables', 'views']);
    this.logger.debug({ count: columns.rows.length }, 'Columns loaded');

    this.feedback({ analysingMessage: 'Loading primary keys' });
    const pkColumns = await this.analyserQuery('primaryKeys', ['tables']);
    this.logger.debug({ count: pkColumns.rows.length }, 'Primary keys loaded');

    let fkColumns = null;

    this.feedback({ analysingMessage: 'Loading foreign key constraints' });
    const fk_tableConstraints = await this.analyserQuery('fk_tableConstraints', ['tables']);
    this.logger.debug({ count: fk_tableConstraints.rows.length }, 'Foreign keys loaded');

    this.feedback({ analysingMessage: 'Loading foreign key refs' });
    const fk_referentialConstraints = await this.analyserQuery('fk_referentialConstraints', ['tables']);
    this.logger.debug({ count: fk_referentialConstraints.rows.length }, 'Foreign key refs loaded');

    this.feedback({ analysingMessage: 'Loading foreign key columns' });
    const fk_keyColumnUsage = await this.analyserQuery('fk_keyColumnUsage', ['tables']);
    this.logger.debug({ count: fk_keyColumnUsage.rows.length }, 'Foreign key columns loaded');

    const cntKey = x => `${x.constraint_name}|${x.constraint_schema}`;
    const fkRows = [];
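
The three foreign-key result sets loaded above are later merged on the composite key produced by cntKey; a hedged sketch of such a merge follows (the helper name and the exact output shape are assumptions, not the actual dbgate implementation):

const _ = require('lodash');

// Assumed merge step: join constraint headers, referential info and key
// columns on "constraint_name|constraint_schema" before building fkColumns.
function joinForeignKeys(tableConstraints, referentialConstraints, keyColumnUsage) {
  const cntKey = x => `${x.constraint_name}|${x.constraint_schema}`;
  const refByKey = _.keyBy(referentialConstraints, cntKey);
  const colsByKey = _.groupBy(keyColumnUsage, cntKey);
  return tableConstraints.map(cnt => ({
    ...cnt,
    ...refByKey[cntKey(cnt)],
    columns: colsByKey[cntKey(cnt)] || [],
  }));
}
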
@ -134,50 +128,41 @@ class Analyser extends DatabaseAnalyser {

    this.feedback({ analysingMessage: 'Loading views' });
    const views = await this.analyserQuery('views', ['views']);
    this.logger.debug({ count: views.rows.length }, 'Views loaded');

    this.feedback({ analysingMessage: 'Loading materialized views' });
    const matviews = this.driver.dialect.materializedViews ? await this.analyserQuery('matviews', ['matviews']) : null;
    this.logger.debug({ count: matviews.rows.length }, 'Materialized views loaded');

    this.feedback({ analysingMessage: 'Loading materialized view columns' });
    const matviewColumns = this.driver.dialect.materializedViews
      ? await this.analyserQuery('matviewColumns', ['matviews'])
      : null;
    this.logger.debug({ count: matviewColumns.rows.length }, 'Materialized view columns loaded');

    this.feedback({ analysingMessage: 'Loading routines' });
    const routines = await this.analyserQuery('routines', ['procedures', 'functions']);
    this.logger.debug({ count: routines.rows.length }, 'Routines loaded');

    this.feedback({ analysingMessage: 'Loading indexes' });
    const indexes = this.driver.__analyserInternals.skipIndexes
      ? { rows: [] }
      : await this.analyserQuery('indexes', ['tables']);
    this.logger.debug({ count: indexes.rows.length }, 'Indexes loaded');

    this.feedback({ analysingMessage: 'Loading index columns' });
    const indexcols = this.driver.__analyserInternals.skipIndexes
      ? { rows: [] }
      : await this.analyserQuery('indexcols', ['tables']);
    this.logger.debug({ count: indexcols.rows.length }, 'Indexes columns loaded');

    this.feedback({ analysingMessage: 'Loading unique names' });
    const uniqueNames = await this.analyserQuery('uniqueNames', ['tables']);
    this.logger.debug({ count: uniqueNames.rows.length }, 'Uniques loaded');

    let geometryColumns = { rows: [] };
    if (views.rows.find(x => x.pure_name == 'geometry_columns' && x.schema_name == 'public')) {
      this.feedback({ analysingMessage: 'Loading geometry columns' });
      geometryColumns = await this.analyserQuery('geometryColumns', ['tables']);
      this.logger.debug({ count: geometryColumns.rows.length }, 'Geometry columns loaded');
    }

    let geographyColumns = { rows: [] };
    if (views.rows.find(x => x.pure_name == 'geography_columns' && x.schema_name == 'public')) {
      this.feedback({ analysingMessage: 'Loading geography columns' });
      geographyColumns = await this.analyserQuery('geographyColumns', ['tables']);
      this.logger.debug({ count: geographyColumns.rows.length }, 'Geography columns loaded');
    }

    this.feedback({ analysingMessage: 'Finalizing DB structure' });

@ -33,6 +33,7 @@ const dialect = {
  dropUnique: true,
  createCheck: true,
  dropCheck: true,
  allowMultipleValuesInsert: true,

  dropReferencesWhenDropTable: true,
  requireStandaloneSelectForScopeIdentity: true,

@ -30,7 +30,7 @@
  },
  "devDependencies": {
    "dbgate-plugin-tools": "^1.0.7",
    "dbgate-query-splitter": "^4.9.3",
    "dbgate-query-splitter": "^4.10.1",
    "dbgate-tools": "^5.0.0-alpha.1",
    "lodash": "^4.17.21",
    "webpack": "^5.91.0",

@ -32,7 +32,7 @@
  "devDependencies": {
    "dbgate-tools": "^5.0.0-alpha.1",
    "dbgate-plugin-tools": "^1.0.4",
    "dbgate-query-splitter": "^4.9.3",
    "dbgate-query-splitter": "^4.10.1",
    "byline": "^5.0.0",
    "webpack": "^5.91.0",
    "webpack-cli": "^5.1.4"

@ -17,6 +17,7 @@ const dialect = {
  explicitDropConstraint: true,
  stringEscapeChar: "'",
  fallbackDataType: 'nvarchar',
  allowMultipleValuesInsert: true,
  dropColumnDependencies: ['indexes', 'primaryKey', 'uniques'],
  quoteIdentifier(s) {
    return `[${s}]`;

yarn.lock

@ -3202,10 +3202,10 @@ dbgate-plugin-xml@^5.0.0-alpha.1:
  resolved "https://registry.yarnpkg.com/dbgate-plugin-xml/-/dbgate-plugin-xml-5.2.7.tgz#0762af51ba6f100e75a63907ea6c679e827c9f7c"
  integrity sha512-gBXy4qetf7eJQW6lM01B+OKLnKB8MKesojdYKysD9oZ+YpQCX8Tq7aHJCrN14FiyIDinpX61kmFH1+LGJ2RkxQ==

dbgate-query-splitter@^4.9.0, dbgate-query-splitter@^4.9.3:
  version "4.9.3"
  resolved "https://registry.yarnpkg.com/dbgate-query-splitter/-/dbgate-query-splitter-4.9.3.tgz#f66396da9ae3cc8f775a282143bfca3441248aa2"
  integrity sha512-QMppAy3S6NGQMawNokmhbpZURvLCETyu/8yTfqWUHGdlK963fdSpmoX1A+9SjCDp62sX0vYntfD7uzd6jVSRcw==
dbgate-query-splitter@^4.10.1:
  version "4.10.1"
  resolved "https://registry.yarnpkg.com/dbgate-query-splitter/-/dbgate-query-splitter-4.10.1.tgz#dc40d792de06f779a743cad054d5e786006b03a9"
  integrity sha512-KqrB7NLP1jXbx8rN7gSmYUVorm6ICeqOV+oR+jHaBLXqqhWepHsKr6JJlFEeb/LhoVjnTDY/cy5zhW1dMIQF6A==

debug@2.6.9, debug@^2.2.0, debug@^2.3.3:
  version "2.6.9"

@ -7978,10 +7978,10 @@ optionator@^0.8.1, optionator@^0.8.3:
  resolved "https://registry.yarnpkg.com/opts/-/opts-2.0.2.tgz#a17e189fbbfee171da559edd8a42423bc5993ce1"
  integrity sha512-k41FwbcLnlgnFh69f4qdUfvDQ+5vaSDnVPFI/y5XuhKRq97EnVVneO9F1ESVCdiVu4fCS2L8usX3mU331hB7pg==

oracledb@^5.5.0:
  version "5.5.0"
  resolved "https://registry.yarnpkg.com/oracledb/-/oracledb-5.5.0.tgz#0cf9af5d0c0815f74849ae9ed56aee823514d71b"
  integrity sha512-i5cPvMENpZP8nnqptB6l0pjiOyySj1IISkbM4Hr3yZEDdANo2eezarwZb9NQ8fTh5pRjmgpZdSyIbnn9N3AENw==
oracledb@^6.5.1:
  version "6.5.1"
  resolved "https://registry.yarnpkg.com/oracledb/-/oracledb-6.5.1.tgz#814d985035acdb1a6470b1152af0ca3767569ede"
  integrity sha512-JzoSGei1wnvmqgKnAZK1W650mzHTZXx+7hClV4mwsbY/ZjUtrpnojNJMYJ2jkOhj7XG5oJPfXc4GqDKaNzkxqg==

os-tmpdir@~1.0.2:
  version "1.0.2"