Merge remote-tracking branch 'rinie/oracle'

Jan Prochazka 2022-12-18 18:47:05 +01:00
commit 973015aed8
36 changed files with 1342 additions and 0 deletions

View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2022 Rinie Kervel
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -0,0 +1,10 @@
[![styled with prettier](https://img.shields.io/badge/styled_with-prettier-ff69b4.svg)](https://github.com/prettier/prettier)
[![NPM version](https://img.shields.io/npm/v/dbgate-plugin-oracle.svg)](https://www.npmjs.com/package/dbgate-plugin-oracle)
# dbgate-plugin-oracle
Use DbGate to install this plugin.

For local builds, set `NODE_OPTIONS=--openssl-legacy-provider` (the webpack 4 build needs it on newer Node.js releases).

The backend maps pg-style `client` and `query` calls onto the `oracledb` driver.
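A minimal sketch of that mapping, assuming a `node-oracledb` connection object (illustrative only, not the plugin's exact code):

```js
// Illustrative sketch: adapt a pg-style query(sql) call to node-oracledb.
// Assumes a connection obtained elsewhere, e.g.:
//   const connection = await oracledb.getConnection({ user, password, connectString });
async function query(connection, sql) {
  // pg: client.query(sql) resolves to { rows, fields }
  // oracledb: connection.execute(sql) resolves to { rows, metaData }
  const result = await connection.execute(sql);
  const columns = (result.metaData || []).map(fld => ({ columnName: fld.name }));
  // zip each positional row array into a { columnName: value } object
  const rows = (result.rows || []).map(row =>
    Object.fromEntries(columns.map((col, i) => [col.columnName, row[i]]))
  );
  return { rows, columns };
}
```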

View File

@ -0,0 +1,4 @@
<?xml version="1.0"?>
<svg xmlns='http://www.w3.org/2000/svg' width='32' height='20'>
<path fill="none" stroke="#C74634" stroke-width="4" d="m10,2a8,8 0 1,0 0,16h12a8,8 0 1,0 0-16z"/>
</svg>


View File

@ -0,0 +1,40 @@
{
"name": "dbgate-plugin-oracle",
"main": "dist/backend.js",
"version": "5.1.7-beta.1",
"license": "MIT",
"description": "Oracle connector plugin for DbGate",
"homepage": "https://dbgate.org",
"repository": {
"type": "git",
"url": "https://github.com/rinie/dbgate-plugin-oracle"
},
"author": "Rinie Kervel",
"keywords": [
"dbgate",
"dbgateplugin",
"oracle"
],
"files": [
"dist",
"icon.svg"
],
"scripts": {
"build:frontend": "webpack --config webpack-frontend.config",
"build:frontend:watch": "webpack --watch --config webpack-frontend.config",
"build:backend": "webpack --config webpack-backend.config.js",
"build": "yarn build:frontend && yarn build:backend",
"plugin": "yarn build && yarn pack && dbgate-plugin dbgate-plugin-oracle",
"copydist": "yarn build && yarn pack && dbgate-copydist ../dist/dbgate-plugin-oracle",
"plugout": "dbgate-plugout dbgate-plugin-oracle",
"prepublishOnly": "yarn build"
},
"devDependencies": {
"dbgate-plugin-tools": "^1.0.8",
"dbgate-query-splitter": "^4.9.0",
"dbgate-tools": "^5.1.7",
"lodash": "^4.17.21",
"webpack": "^4.42.0",
"webpack-cli": "^3.3.11"
}
}

View File

@ -0,0 +1,9 @@
module.exports = {
trailingComma: 'es5',
tabWidth: 2,
semi: true,
singleQuote: true,
arrowParens: 'avoid',
printWidth: 120,
};

View File

@ -0,0 +1,281 @@
const fp = require('lodash/fp');
const _ = require('lodash');
const sql = require('./sql');
const { DatabaseAnalyser } = require('dbgate-tools');
const { isTypeString, isTypeNumeric } = require('dbgate-tools');
function normalizeTypeName(dataType) {
if (dataType == 'character varying') return 'varchar';
if (dataType == 'timestamp without time zone') return 'timestamp';
return dataType;
}
function getColumnInfo(
{ is_nullable, column_name, data_type, char_max_length, numeric_precision, numeric_scale, default_value },
table = undefined,
geometryColumns = undefined,
geographyColumns = undefined
) {
const normDataType = normalizeTypeName(data_type);
let fullDataType = normDataType;
if (char_max_length && isTypeString(normDataType)) fullDataType = `${normDataType}(${char_max_length})`;
if (numeric_precision && numeric_scale && isTypeNumeric(normDataType))
fullDataType = `${normDataType}(${numeric_precision},${numeric_scale})`;
const autoIncrement = !!(default_value && default_value.startsWith('nextval('));
if (
table &&
geometryColumns &&
geometryColumns.rows.find(
x => x.schema_name == table.schemaName && x.pure_name == table.pureName && x.column_name == column_name
)
) {
fullDataType = 'geometry';
}
if (
table &&
geographyColumns &&
geographyColumns.rows.find(
x => x.schema_name == table.schemaName && x.pure_name == table.pureName && x.column_name == column_name
)
) {
fullDataType = 'geography';
}
return {
columnName: column_name,
dataType: fullDataType,
notNull: !is_nullable || is_nullable == 'N' || is_nullable == 'NO' || is_nullable == 'no',
defaultValue: autoIncrement ? undefined : default_value,
autoIncrement,
};
}
class Analyser extends DatabaseAnalyser {
constructor(pool, driver, version) {
super(pool, driver, version);
}
createQuery(resFileName, typeFields) {
const query = super.createQuery(sql[resFileName], typeFields);
//if (query) return query.replace('#REFTABLECOND#', this.driver.__analyserInternals.refTableCond);
return query;
}
async _computeSingleObjectId() {
const { typeField, schemaName, pureName } = this.singleObjectFilter;
this.singleObjectId = `${typeField}:${schemaName || 'public'}.${pureName}`;
}
async _runAnalysis() {
this.feedback({ analysingMessage: 'Loading tables' });
const tables = await this.driver.query(this.pool, this.createQuery('tableList', ['tables']));
this.feedback({ analysingMessage: 'Loading columns' });
const columns = await this.driver.query(this.pool, this.createQuery('columns', ['tables', 'views']));
this.feedback({ analysingMessage: 'Loading primary keys' });
const pkColumns = await this.driver.query(this.pool, this.createQuery('primaryKeys', ['tables']));
//let fkColumns = null;
this.feedback({ analysingMessage: 'Loading foreign keys' });
const fkColumns = await this.driver.query(this.pool, this.createQuery('foreignKeys', ['tables']));
this.feedback({ analysingMessage: 'Loading views' });
const views = await this.driver.query(this.pool, this.createQuery('views', ['views']));
let geometryColumns = { rows: [] };
let geographyColumns = { rows: [] };
this.feedback({ analysingMessage: 'Loading materialized views' });
const matviews = this.driver.dialect.materializedViews
? await this.driver.query(this.pool, this.createQuery('matviews', ['matviews']))
: null;
this.feedback({ analysingMessage: 'Loading materialized view columns' });
const matviewColumns = this.driver.dialect.materializedViews
? await this.driver.query(this.pool, this.createQuery('matviewColumns', ['matviews']))
: null;
this.feedback({ analysingMessage: 'Loading routines' });
const routines = await this.driver.query(this.pool, this.createQuery('routines', ['procedures', 'functions']));
this.feedback({ analysingMessage: 'Loading indexes' });
const indexes = this.driver.__analyserInternals.skipIndexes
? { rows: [] }
: await this.driver.query(this.pool, this.createQuery('indexes', ['tables']));
this.feedback({ analysingMessage: 'Loading index columns' });
// const indexcols = this.driver.__analyserInternals.skipIndexes
// ? { rows: [] }
// : await this.driver.query(this.pool, this.createQuery('indexcols', ['tables']));
this.feedback({ analysingMessage: 'Loading unique names' });
const uniqueNames = await this.driver.query(this.pool, this.createQuery('uniqueNames', ['tables']));
this.feedback({ analysingMessage: 'Finalizing DB structure' });
const columnColumnsMapped = fkColumns.rows.map(x => ({
pureName: x.pure_name,
schemaName: x.schema_name,
constraintSchema: x.constraint_schema,
constraintName: x.constraint_name,
columnName: x.column_name,
refColumnName: x.ref_column_name,
updateAction: x.update_action,
deleteAction: x.delete_action,
refTableName: x.ref_table_name,
refSchemaName: x.ref_schema_name,
}));
const pkColumnsMapped = pkColumns.rows.map(x => ({
pureName: x.pure_name,
schemaName: x.schema_name,
constraintSchema: x.constraint_schema,
constraintName: x.constraint_name,
columnName: x.column_name,
}));
const res = {
tables: tables.rows.map(table => {
const newTable = {
pureName: table.pure_name,
schemaName: table.schema_name,
objectId: `tables:${table.schema_name}.${table.pure_name}`,
contentHash: table.hash_code_columns ? `${table.hash_code_columns}-${table.hash_code_constraints}` : null,
};
return {
...newTable,
columns: columns.rows
.filter(col => col.pure_name == table.pure_name && col.schema_name == table.schema_name)
.map(col => getColumnInfo(col, newTable, geometryColumns, geographyColumns)),
primaryKey: DatabaseAnalyser.extractPrimaryKeys(newTable, pkColumnsMapped),
foreignKeys: DatabaseAnalyser.extractForeignKeys(newTable, columnColumnsMapped),
indexes: _.uniqBy(
indexes.rows.filter(
idx =>
idx.tableName == newTable.pureName && !uniqueNames.rows.find(x => x.constraintName == idx.constraintName)
),
'constraintName'
).map(idx => ({
..._.pick(idx, ['constraintName', 'indexType']),
isUnique: idx.Unique === 'UNIQUE',
columns: indexes.rows
.filter(col => col.tableName == idx.tableName && col.constraintName == idx.constraintName)
.map(col => ({
..._.pick(col, ['columnName']),
})),
})),
uniques: _.uniqBy(
indexes.rows.filter(
idx => idx.tableName == newTable.pureName && uniqueNames.rows.find(x => x.constraintName == idx.constraintName)
),
'constraintName'
).map(idx => ({
..._.pick(idx, ['constraintName']),
columns: indexes.rows
.filter(col => col.tableName == idx.tableName && col.constraintName == idx.constraintName)
.map(col => ({
..._.pick(col, ['columnName']),
})),
})),
};
}),
views: views.rows.map(view => ({
objectId: `views:${view.schema_name}.${view.pure_name}`,
pureName: view.pure_name,
schemaName: view.schema_name,
contentHash: view.hash_code,
createSql: `CREATE VIEW "${view.schema_name}"."${view.pure_name}"\nAS\n${view.create_sql}`,
columns: columns.rows
.filter(col => col.pure_name == view.pure_name && col.schema_name == view.schema_name)
.map(col => getColumnInfo(col)),
})),
matviews: matviews
? matviews.rows.map(matview => ({
objectId: `matviews:${matview.schema_name}.${matview.pure_name}`,
pureName: matview.pure_name,
schemaName: matview.schema_name,
contentHash: matview.hash_code,
createSql: `CREATE MATERIALIZED VIEW "${matview.schema_name}"."${matview.pure_name}"\nAS\n${matview.definition}`,
columns: matviewColumns.rows
.filter(col => col.pure_name == matview.pure_name && col.schema_name == matview.schema_name)
.map(col => getColumnInfo(col)),
}))
: undefined,
procedures: routines.rows
.filter(x => x.object_type == 'PROCEDURE')
.map(proc => ({
objectId: `procedures:${proc.schema_name}.${proc.pure_name}`,
pureName: proc.pure_name,
schemaName: proc.schema_name,
createSql: `CREATE PROCEDURE "${proc.schema_name}"."${proc.pure_name}"() LANGUAGE ${proc.language}\nAS\n$$\n${proc.definition}\n$$`,
contentHash: proc.hash_code,
})),
functions: routines.rows
.filter(x => x.object_type == 'FUNCTION')
.map(func => ({
objectId: `functions:${func.schema_name}.${func.pure_name}`,
createSql: `CREATE FUNCTION "${func.schema_name}"."${func.pure_name}"() RETURNS ${func.data_type} LANGUAGE ${func.language}\nAS\n$$\n${func.definition}\n$$`,
pureName: func.pure_name,
schemaName: func.schema_name,
contentHash: func.hash_code,
})),
};
this.feedback({ analysingMessage: null });
return res;
}
async _getFastSnapshot() {
// fast snapshot is not implemented for Oracle yet; the queries below are kept for reference only
return null;
const tableModificationsQueryData = this.driver.dialect.stringAgg
? await this.driver.query(this.pool, this.createQuery('tableModifications'))
: null;
const viewModificationsQueryData = await this.driver.query(this.pool, this.createQuery('viewModifications'));
const matviewModificationsQueryData = this.driver.dialect.materializedViews
? await this.driver.query(this.pool, this.createQuery('matviewModifications'))
: null;
const routineModificationsQueryData = await this.driver.query(this.pool, this.createQuery('routineModifications'));
return {
tables: tableModificationsQueryData
? tableModificationsQueryData.rows.map(x => ({
objectId: `tables:${x.schema_name}.${x.pure_name}`,
pureName: x.pure_name,
schemaName: x.schema_name,
contentHash: `${x.hash_code_columns}-${x.hash_code_constraints}`,
}))
: null,
views: viewModificationsQueryData
? viewModificationsQueryData.rows.map(x => ({
objectId: `views:${x.schema_name}.${x.pure_name}`,
pureName: x.pure_name,
schemaName: x.schema_name,
contentHash: x.hash_code,
}))
: undefined,
matviews: matviewModificationsQueryData
? matviewModificationsQueryData.rows.map(x => ({
objectId: `matviews:${x.schema_name}.${x.pure_name}`,
pureName: x.pure_name,
schemaName: x.schema_name,
contentHash: x.hash_code,
}))
: undefined,
procedures: routineModificationsQueryData.rows
.filter(x => x.object_type == 'PROCEDURE')
.map(x => ({
objectId: `procedures:${x.schema_name}.${x.pure_name}`,
pureName: x.pure_name,
schemaName: x.schema_name,
contentHash: x.hash_code,
})),
functions: routineModificationsQueryData.rows
.filter(x => x.object_type == 'FUNCTION')
.map(x => ({
objectId: `functions:${x.schema_name}.${x.pure_name}`,
pureName: x.pure_name,
schemaName: x.schema_name,
contentHash: x.hash_code,
})),
};
}
}
module.exports = Analyser;

View File

@ -0,0 +1,328 @@
const _ = require('lodash');
const stream = require('stream');
const driverBases = require('../frontend/drivers');
const Analyser = require('./Analyser');
//--const pg = require('pg');
//const oracledb = require('oracledb');
let oracledb; // native module
const { createBulkInsertStreamBase, makeUniqueColumnNames } = require('dbgate-tools');
/*
pg.types.setTypeParser(1082, 'text', val => val); // date
pg.types.setTypeParser(1114, 'text', val => val); // timestamp without timezone
pg.types.setTypeParser(1184, 'text', val => val); // timestamp
*/
function extractOracleColumns(result) {
if (!result /*|| !result.fields */) return [];
const res = result.map(fld => ({
columnName: fld.name, //columnName: fld.name.toLowerCase(),
}));
makeUniqueColumnNames(res);
return res;
}
function zipDataRow(rowArray, columns) {
let obj = _.zipObject(
columns.map(x => x.columnName),
rowArray
);
//console.log('zipDataRow columns', columns);
//console.log('zipDataRow', obj);
return obj;
}
/** @type {import('dbgate-types').EngineDriver} */
const drivers = driverBases.map(driverBase => ({
...driverBase,
analyserClass: Analyser,
async connect({
engine,
server,
port,
user,
password,
database,
databaseUrl,
useDatabaseUrl,
ssl,
isReadOnly,
authType,
socketPath,
}) {
let options = null;
if (engine == 'redshift@dbgate-plugin-oracle') {
let url = databaseUrl;
if (url && url.startsWith('jdbc:redshift://')) {
url = url.substring('jdbc:redshift://'.length);
}
if (user && password) {
url = `oracle://${user}:${password}@${url}`;
} else if (user) {
url = `oracle://${user}@${url}`;
} else {
url = `oracle://${url}`;
}
options = {
connectionString: url,
};
} else {
options = useDatabaseUrl
? {
connectionString: databaseUrl,
}
: {
host: authType == 'socket' ? socketPath || driverBase.defaultSocketPath : server,
port: authType == 'socket' ? null : port,
user,
password,
database: database || 'oracle',
ssl,
};
}
console.log('OPTIONS', options);
/*
const client = new pg.Client(options);
await client.connect();
if (isReadOnly) {
await this.query(client, 'SET SESSION CHARACTERISTICS AS TRANSACTION READ ONLY');
}
*/
// note: only options.host is used as the connect string here; port and service name are not applied
const client = await oracledb.getConnection({
user: options.user,
password: options.password,
connectString: options.host,
});
return client;
},
async close(pool) {
return pool.close();
},
async query(client, sql) {
//console.log('query sql', sql);
if (sql == null) {
return {
rows: [],
columns: [],
};
}
try {
//console.log('sql3', sql);
const res = await client.execute(sql);
//console.log('res', res);
const columns = extractOracleColumns(res.metaData);
//console.log('columns', columns);
return { rows: (res.rows || []).map(row => zipDataRow(row, columns)), columns };
}
catch(err) {
console.log('Error query', err, sql);
}
finally {
//console.log('finally', sql);
}
},
stream(client, sql, options) {
/*
const query = new pg.Query({
text: sql,
rowMode: 'array',
});
*/
console.log('queryStream', sql);
const query = client.queryStream(sql);
// const consumeStream = new Promise((resolve, reject) => {
let rowcount = 0;
let wasHeader = false;
let columns = null;
query.on('metadata', row => {
console.log('metadata', row);
if (!wasHeader) {
columns = extractOracleColumns(row);
if (columns && columns.length > 0) {
options.recordset(columns);
}
wasHeader = true;
}
options.row(zipDataRow(row, columns));
});
query.on('data', row => {
console.log('stream DATA');
if (!wasHeader) {
columns = extractOracleColumns(row);
if (columns && columns.length > 0) {
options.recordset(columns);
}
wasHeader = true;
}
options.row(zipDataRow(row, columns));
});
query.on('end', () => {
const { command, rowCount } = query._result || {};
if (command != 'SELECT' && _.isNumber(rowCount)) {
options.info({
message: `${rowCount} rows affected`,
time: new Date(),
severity: 'info',
});
}
if (!wasHeader) {
columns = extractOracleColumns(query._result);
if (columns && columns.length > 0) {
options.recordset(columns);
}
wasHeader = true;
}
options.done();
});
query.on('error', error => {
console.log('ERROR', error);
const { message, lineNumber, procName } = error;
options.info({
message,
line: lineNumber,
procedure: procName,
time: new Date(),
severity: 'error',
});
options.done();
});
query.on('close', function() {
//console.log("stream 'close' event");
// The underlying ResultSet has been closed, so the connection can now
// be closed, if desired. Note: do not close connections on 'end'.
//resolve(rowcount);
});
//});
//const numrows = await consumeStream;
//console.log('Rows selected: ' + numrows);
//client.query(query);
},
async getVersion(client) {
//const { rows } = await this.query(client, "SELECT banner as version FROM v$version WHERE banner LIKE 'Oracle%'");
const { rows } = await this.query(client, "SELECT version as \"version\" FROM v$instance");
const { version } = rows[0];
const isCockroach = false; //version.toLowerCase().includes('cockroachdb');
const isRedshift = false; // version.toLowerCase().includes('redshift');
const isOracle = true;
const m = version.match(/([\d\.]+)/);
//console.log('M', m);
let versionText = null;
let versionMajor = null;
let versionMinor = null;
if (m) {
if (isOracle) versionText = `Oracle ${m[1]}`;
const numbers = m[1].split('.');
if (numbers[0]) versionMajor = parseInt(numbers[0]);
if (numbers[1]) versionMinor = parseInt(numbers[1]);
}
return {
version,
versionText,
isOracle,
isCockroach,
isRedshift,
versionMajor,
versionMinor,
};
},
async readQuery(client, sql, structure) {
/*
const query = new pg.Query({
text: sql,
rowMode: 'array',
});
*/
console.log('readQuery', sql, structure);
const query = client.queryStream(sql); // queryStream returns a readable stream synchronously
let wasHeader = false;
let columns = null;
const pass = new stream.PassThrough({
objectMode: true,
highWaterMark: 100,
});
query.on('metadata', row => {
console.log('readQuery metadata', row);
if (!wasHeader) {
columns = extractOracleColumns(row);
if (columns && columns.length > 0) {
pass.write({
__isStreamHeader: true,
...(structure || { columns }),
});
}
wasHeader = true;
}
pass.write(zipDataRow(row, columns));
});
query.on('data', row => {
console.log('readQuery data', row);
pass.write(zipDataRow(row, columns));
});
query.on('end', () => {
pass.end();
});
query.on('error', error => {
console.error(error);
pass.end();
});
//client.query(query);
return pass;
},
async writeTable(pool, name, options) {
// @ts-ignore
return createBulkInsertStreamBase(this, stream, pool, name, options);
},
async listDatabases(client) {
const { rows } = await this.query(client, 'SELECT instance_name AS "name" FROM v$instance');
return rows;
},
getAuthTypes() {
return [
{
title: 'Host and port',
name: 'hostPort',
},
{
title: 'Socket',
name: 'socket',
},
];
},
}));
drivers.initialize = (dbgateEnv) => {
if (dbgateEnv.nativeModules && dbgateEnv.nativeModules.oracledb) {
oracledb = dbgateEnv.nativeModules.oracledb();
}
};
module.exports = drivers;

View File

@ -0,0 +1,9 @@
const drivers = require('./drivers');
module.exports = {
packageName: 'dbgate-plugin-oracle',
drivers,
initialize(dbgateEnv) {
drivers.initialize(dbgateEnv);
},
};

View File

@ -0,0 +1,15 @@
module.exports = `
select
owner as "schema_name",
table_name as "pure_name",
column_name as "column_name",
nullable as "is_nullable",
data_type as "data_type",
data_length as "char_max_length",
data_precision as "numeric_precision",
data_scale as "numeric_scale",
data_default as "default_value"
FROM all_tab_columns av
where TABLE_NAME =OBJECT_ID_CONDITION
order by column_id
`;

View File

@ -0,0 +1,2 @@
module.exports = `
`;

View File

@ -0,0 +1,2 @@
module.exports = `
`;

View File

@ -0,0 +1,2 @@
module.exports = `
`;

View File

@ -0,0 +1,24 @@
module.exports = `
select fk.constraint_name as "constraint_name",
fk.owner as "constraint_schema",
fk.table_name as "pure_name",
fk.owner as "schema_name",
fk.delete_rule as "update_action",
fk.delete_rule as "delete_action",
ref.table_name as "ref_table_name",
ref.owner as "ref_schema_name",
basecol.column_name as "column_name",
refcol.column_name as "ref_column_name"
from all_cons_columns refcol, all_cons_columns basecol, all_constraints ref, all_constraints fk
where fk.constraint_type = 'R'
and ref.owner = fk.r_owner
and ref.constraint_name = fk.r_constraint_name
and basecol.owner = fk.owner
and basecol.constraint_name = fk.constraint_name
and basecol.table_name = fk.table_name
and refcol.owner = ref.owner
and refcol.constraint_name = ref.constraint_name
and refcol.table_name = ref.table_name
AND fk.constraint_name =OBJECT_ID_CONDITION
order by basecol.position
`;

View File

@ -0,0 +1,2 @@
module.exports = `
`;

View File

@ -0,0 +1,2 @@
module.exports = `
`;

View File

@ -0,0 +1,41 @@
const columns = require('./columns');
const tableModifications = require('./tableList');
const tableList = require('./tableList');
const viewModifications = require('./views');
const matviewModifications = require('./matviews');
const primaryKeys = require('./primaryKeys');
const foreignKeys = require('./foreignKeys');
const views = require('./views');
const matviews = require('./matviews');
const routines = require('./routines');
const routineModifications = require('./routines');
const matviewColumns = require('./matviewColumns');
const indexes = require('./indexes'); // use mysql
//const indexcols = require('./indexcols');
const uniqueNames = require('./uniqueNames');
//const geometryColumns = require('./geometryColumns');
//const geographyColumns = require('./geographyColumns');
//const fk_keyColumnUsage = require('./fk_key_column_usage');
//const fk_referentialConstraints = require('./fk_referential_constraints');
//const fk_tableConstraints = require('./fk_table_constraints');
module.exports = {
columns,
tableModifications,
tableList,
viewModifications,
primaryKeys,
foreignKeys,
views,
routines,
routineModifications,
matviews,
matviewModifications,
matviewColumns,
indexes,
// indexcols,
uniqueNames,
//geometryColumns,
//geographyColumns,
};

View File

@ -0,0 +1,2 @@
module.exports = `
`;

View File

@ -0,0 +1,18 @@
module.exports = `
select i.table_name as "tableName",
i.table_owner as "schemaName",
i.index_name as "constraintName",
i.index_type as "indexType",
i.uniqueness as "Unique",
ic.column_name as "columnName",
ic.column_position as "position",
ic.descend as "descending"
from all_ind_columns ic, all_indexes i
where ic.index_owner = i.owner
and ic.index_name = i.index_name
and i.index_name =OBJECT_ID_CONDITION
order by i.table_owner,
i.table_name,
i.index_name,
ic.column_position
`;

View File

@ -0,0 +1,9 @@
module.exports = `
SELECT owner "schema_name"
, table_name "pure_name"
, column_name "column_name"
, data_type "data_type"
FROM all_tab_columns av
where table_name =OBJECT_ID_CONDITION
order by column_id
`;

View File

@ -0,0 +1,2 @@
module.exports = `
`;

View File

@ -0,0 +1,15 @@
module.exports = `
select owner as "schema_name",
mview_name as "pure_name",
container_name as "container_name",
query as "definition",
ora_hash(query, 'MD5') as "hash_code"
--refresh_mode,
--refresh_method,
--build_mode,
--last_refresh_date,
--compile_state
from all_mviews
where mview_name=OBJECT_ID_CONDITION
order by owner, mview_name
`;

View File

@ -0,0 +1,16 @@
module.exports = `
select
pk.owner as "constraint_schema",
pk.constraint_name as "constraint_name",
pk.owner as "schema_name",
pk.table_name as "pure_name",
basecol.column_name as "column_name"
from all_cons_columns basecol,
all_constraints pk
where pk.constraint_type = 'P'
and basecol.owner = pk.owner
and basecol.constraint_name = pk.constraint_name
and basecol.table_name = pk.table_name
and pk.constraint_name =OBJECT_ID_CONDITION
order by basecol.position
`;

View File

@ -0,0 +1,2 @@
module.exports = `
`;

View File

@ -0,0 +1,40 @@
module.exports = `
select
routine_name as "pure_name",
routine_schema as "schema_name",
routine_definition as "definition",
standard_hash(routine_definition, 'MD5') as "hash_code",
routine_type as "object_type",
'fixme_data_type' as "data_type",
'fixme_external_language' as "language"
from (select
sys_context('userenv', 'DB_NAME') routine_catalog,
sys_context('userenv', 'DB_NAME') specific_catalog,
ap.owner specific_schema,
ap.owner routine_schema,
decode( ap.procedure_name, null, ap.object_name || ap.procedure_name, ap.procedure_name ) specific_name,
decode( ap.procedure_name, null, ap.object_name || ap.procedure_name, ap.procedure_name ) routine_name,
ao.object_type routine_type,
decode(impltypeowner, null, to_char(null), SYS_CONTEXT('userenv', 'DB_NAME')) type_udt_catalog,
--to_clob(get_proc_text(ap.owner, ap.object_name, ao.object_type, 32767)) routine_body,
'fixme_routine_body.' || ap.owner || '.' || decode( ap.procedure_name, null, ap.object_name || ap.procedure_name, ap.procedure_name ) routine_body,
--to_clob(get_proc_text(ap.owner, ap.object_name, ao.object_type, 4000)) routine_definition,
'fixme_routine_definition.' || ap.owner || '.' || decode( ap.procedure_name, null, ap.object_name || ap.procedure_name, ap.procedure_name ) routine_definition,
sys_context('userenv', 'DB_NAME') character_set_catalog,
'SYS' character_set_schema,
sys_context('userenv', 'DB_NAME') collation_catalog,
'SYS' collation_schema,
deterministic is_deterministic,
pipelined is_pipelined ,
aggregate is_aggregate,
authid is_definer
from
all_procedures ap,
all_objects ao
where
ap.owner = ao.owner and
ap.object_name = ao.object_name and
ao.object_type in ('PACKAGE', 'PROCEDURE', 'FUNCTION')
and ao.object_name =OBJECT_ID_CONDITION
) routines
`;

View File

@ -0,0 +1,9 @@
module.exports = `
select
owner "schema_name",
table_name "pure_name"
from
all_tables
where TABLE_NAME =OBJECT_ID_CONDITION
`;

View File

@ -0,0 +1,2 @@
module.exports = `
`;

View File

@ -0,0 +1,6 @@
module.exports = `
select constraint_name as "constraintName"
from all_constraints
where constraint_type = 'U'
and constraint_name =OBJECT_ID_CONDITION
`;

View File

@ -0,0 +1,2 @@
module.exports = `
`;

View File

@ -0,0 +1,12 @@
module.exports = `
select avv.*,
ora_hash("create_sql") as "hash_code"
from (select
view_name as "pure_name",
owner as "schema_name",
SUBSTR(text_vc, 1, 3900) AS "create_sql"
from all_views av
where text_vc is not null
) avv
where "pure_name" =OBJECT_ID_CONDITION
`;

View File

@ -0,0 +1,100 @@
const { SqlDumper, arrayToHexString, testEqualTypes } = global.DBGATE_TOOLS;
class Dumper extends SqlDumper {
/** @param type {import('dbgate-types').TransformType} */
transform(type, dumpExpr) {
switch (type) {
case 'GROUP:YEAR':
case 'YEAR':
this.put('^extract(^year ^from %c)', dumpExpr);
break;
case 'MONTH':
this.put('^extract(^month ^from %c)', dumpExpr);
break;
case 'DAY':
this.put('^extract(^day ^from %c)', dumpExpr);
break;
case 'GROUP:MONTH':
this.put("^to_char(%c, '%s')", dumpExpr, 'YYYY-MM');
break;
case 'GROUP:DAY':
this.put("^to_char(%c, '%s')", dumpExpr, 'YYYY-MM-DD');
break;
default:
dumpExpr();
break;
}
}
dropRecreatedTempTable(tmptable) {
this.putCmd('^drop ^table %i ^cascade', tmptable);
}
renameTable(obj, newname) {
this.putCmd('^alter ^table %f ^rename ^to %i', obj, newname);
}
renameColumn(column, newcol) {
this.putCmd('^alter ^table %f ^rename ^column %i ^to %i', column, column.columnName, newcol);
}
dropTable(obj, options = {}) {
this.put('^drop ^table');
if (options.testIfExists) this.put(' ^if ^exists');
this.put(' %f', obj);
this.endCommand();
}
//public override void CreateIndex(IndexInfo ix)
//{
//}
enableConstraints(table, enabled) {
this.putCmd('^alter ^table %f %k ^trigger ^all', table, enabled ? 'enable' : 'disable');
}
columnDefinition(col, options) {
if (col.autoIncrement) {
this.put('^serial');
return;
}
super.columnDefinition(col, options);
}
changeColumn(oldcol, newcol, constraints) {
if (oldcol.columnName != newcol.columnName) {
this.putCmd('^alter ^table %f ^rename ^column %i ^to %i', oldcol, oldcol.columnName, newcol.columnName);
}
if (!testEqualTypes(oldcol, newcol)) {
this.putCmd('^alter ^table %f ^alter ^column %i ^type %s', oldcol, newcol.columnName, newcol.dataType);
}
if (oldcol.notNull != newcol.notNull) {
if (newcol.notNull) this.putCmd('^alter ^table %f ^alter ^column %i ^set ^not ^null', newcol, newcol.columnName);
else this.putCmd('^alter ^table %f ^alter ^column %i ^drop ^not ^null', newcol, newcol.columnName);
}
if (oldcol.defaultValue != newcol.defaultValue) {
if (newcol.defaultValue == null) {
this.putCmd('^alter ^table %f ^alter ^column %i ^drop ^default', newcol, newcol.columnName);
} else {
this.putCmd(
'^alter ^table %f ^alter ^column %i ^set ^default %s',
newcol,
newcol.columnName,
newcol.defaultValue
);
}
}
}
putValue(value) {
if (value === true) this.putRaw('true');
else if (value === false) this.putRaw('false');
else super.putValue(value);
}
putByteArrayValue(value) {
this.putRaw(`e'\\\\x${arrayToHexString(value)}'`);
}
}
module.exports = Dumper;

View File

@ -0,0 +1,202 @@
const { driverBase } = global.DBGATE_TOOLS;
const Dumper = require('./Dumper');
const { oracleSplitterOptions } = require('dbgate-query-splitter/lib/options');
const spatialTypes = ['GEOGRAPHY'];
/** @type {import('dbgate-types').SqlDialect} */
const dialect = {
rangeSelect: false,
limitSelect: false,
offsetFetchRangeSyntax: true,
ilike: true,
// stringEscapeChar: '\\',
stringEscapeChar: "'",
fallbackDataType: 'varchar',
anonymousPrimaryKey: true,
enableConstraintsPerTable: true,
dropColumnDependencies: ['dependencies'],
quoteIdentifier(s) {
return '"' + s + '"';
},
stringAgg: true,
createColumn: true,
dropColumn: true,
changeColumn: true,
createIndex: true,
dropIndex: true,
createForeignKey: true,
dropForeignKey: true,
createPrimaryKey: true,
dropPrimaryKey: true,
createUnique: true,
dropUnique: true,
createCheck: true,
dropCheck: true,
dropReferencesWhenDropTable: true,
predefinedDataTypes: [
'bigint',
'bigserial',
'bit',
'varbit',
'boolean',
'box',
'bytea',
'char(20)',
'varchar(250)',
'cidr',
'circle',
'date',
'double precision',
'inet',
'int',
'interval',
'json',
'jsonb',
'line',
'lseg',
'macaddr',
'macaddr8',
'money',
'numeric(10,2)',
'path',
'pg_lsn',
'pg_snapshot',
'point',
'polygon',
'real',
'smallint',
'smallserial',
'serial',
'text',
'time',
'timetz',
'timestamp',
'timestamptz',
'tsquery',
'tsvector',
'txid_snapshot',
'uuid',
'xml',
],
createColumnViewExpression(columnName, dataType, source, alias) {
if (dataType && spatialTypes.includes(dataType.toUpperCase())) {
return {
exprType: 'call',
func: 'ST_AsText',
alias: alias || columnName,
args: [
{
exprType: 'column',
columnName,
source,
},
],
};
}
},
};
const oracleDriverBase = {
...driverBase,
dumperClass: Dumper,
dialect,
// showConnectionField: (field, values) =>
// ['server', 'port', 'user', 'password', 'defaultDatabase', 'singleDatabase'].includes(field),
getQuerySplitterOptions: () => oracleSplitterOptions,
readOnlySessions: true,
databaseUrlPlaceholder: 'e.g. oracledb://user:password@localhost:1521',
showConnectionField: (field, values) => {
if (field == 'useDatabaseUrl') return true;
if (values.useDatabaseUrl) {
return ['databaseUrl', 'isReadOnly'].includes(field);
}
return (
['authType', 'user', 'password', 'defaultDatabase', 'singleDatabase', 'isReadOnly'].includes(field) ||
(values.authType == 'socket' && ['socketPath'].includes(field)) ||
(values.authType != 'socket' && ['server', 'port'].includes(field))
);
},
beforeConnectionSave: connection => {
const { databaseUrl } = connection;
if (databaseUrl) {
const m = databaseUrl.match(/\/([^/]+)($|\?)/);
return {
...connection,
singleDatabase: !!m,
defaultDatabase: m ? m[1] : null,
};
}
return connection;
},
__analyserInternals: {
refTableCond: '',
},
getNewObjectTemplates() {
return [
{ label: 'New view', sql: 'CREATE VIEW myview\nAS\nSELECT * FROM table1' },
{ label: 'New materialized view', sql: 'CREATE MATERIALIZED VIEW myview\nAS\nSELECT * FROM table1' },
{
label: 'New procedure',
sql: `CREATE PROCEDURE myproc (arg1 INT)
LANGUAGE SQL
AS $$
SELECT * FROM table1;
$$`,
},
{
label: 'New function (plpgsql)',
sql: `CREATE FUNCTION myfunc (arg1 INT)
RETURNS INT
AS $$
BEGIN
RETURN 1;
END
$$ LANGUAGE plpgsql;`,
},
];
},
authTypeLabel: 'Connection mode',
defaultAuthTypeName: 'hostPort',
defaultSocketPath: '/var/run/oracledb',
};
/** @type {import('dbgate-types').EngineDriver} */
const oracleDriver = {
...oracleDriverBase,
engine: 'oracle@dbgate-plugin-oracle',
title: 'OracleDB',
defaultPort: 1521,
dialect: {
...dialect,
materializedViews: true,
},
dialectByVersion(version) {
if (version) {
return {
...dialect,
materializedViews:
version &&
version.versionMajor != null &&
version.versionMinor != null &&
(version.versionMajor > 9 || (version.versionMajor == 9 && version.versionMinor >= 3)),
};
}
return dialect;
},
};
module.exports = [oracleDriver];

View File

@ -0,0 +1,6 @@
import drivers from './drivers';
export default {
packageName: 'dbgate-plugin-oracle',
drivers,
};

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,40 @@
var webpack = require('webpack');
var path = require('path');
var config = {
context: __dirname + '/src/backend',
entry: {
app: './index.js',
},
target: 'node',
output: {
path: path.resolve(__dirname, 'dist'),
filename: 'backend.js',
libraryTarget: 'commonjs2',
},
// uncomment to disable minimization
// optimization: {
// minimize: false,
// },
plugins: [
new webpack.IgnorePlugin({
checkResource(resource) {
const lazyImports = ['oracledb', 'uws'];
if (!lazyImports.includes(resource)) {
return false;
}
try {
require.resolve(resource);
} catch (err) {
return true;
}
return false;
},
}),
],
};
module.exports = config;

View File

@ -0,0 +1,30 @@
var webpack = require('webpack');
var path = require('path');
var config = {
context: __dirname + '/src/frontend',
entry: {
app: './index.js',
},
target: 'web',
output: {
path: path.resolve(__dirname, 'dist'),
filename: 'frontend.js',
libraryTarget: 'var',
library: 'plugin',
},
plugins: [
new webpack.DefinePlugin({
'global.DBGATE_TOOLS': 'window.DBGATE_TOOLS',
}),
],
// uncomment to disable minimization
// optimization: {
// minimize: false,
// },
};
module.exports = config;

start.bat Normal file (4 additions)
View File

@ -0,0 +1,4 @@
set NODE_OPTIONS=--openssl-legacy-provider
start cmd /k yarn start:api
start cmd /k yarn start:web
start cmd /k yarn lib