mirror of https://github.com/dbgate/dbgate
synced 2024-11-07 20:26:23 +00:00

stream header flag + export from mongo

This commit is contained in:
parent a5d37eb528
commit 20fccf51d9
@@ -22,7 +22,13 @@ class TableWriter {
     this.currentFile = path.join(jsldir(), `${this.jslid}.jsonl`);
     this.currentRowCount = 0;
     this.currentChangeIndex = 1;
-    fs.writeFileSync(this.currentFile, JSON.stringify(structure) + '\n');
+    fs.writeFileSync(
+      this.currentFile,
+      JSON.stringify({
+        ...structure,
+        __isStreamHeader: true,
+      }) + '\n'
+    );
     this.currentStream = fs.createWriteStream(this.currentFile, { flags: 'a' });
     this.writeCurrentStats(false, false);
     this.resultIndex = resultIndex;
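The first line of each .jsonl result file now carries an explicit __isStreamHeader flag, so readers can recognize the structure header by inspection instead of by position. A minimal sketch of the resulting file layout, with made-up column names:

    // hypothetical structure object passed to TableWriter
    const structure = { columns: [{ columnName: 'id' }, { columnName: 'name' }] };
    // first line of the .jsonl file, as written above:
    console.log(JSON.stringify({ ...structure, __isStreamHeader: true }));
    // -> {"columns":[{"columnName":"id"},{"columnName":"name"}],"__isStreamHeader":true}
    // subsequent lines are plain data rows, e.g. {"id":1,"name":"first"}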
@@ -1,9 +1,13 @@
+const EnsureStreamHeaderStream = require('../utility/EnsureStreamHeaderStream');
+
 function copyStream(input, output) {
   return new Promise((resolve, reject) => {
+    const ensureHeader = new EnsureStreamHeaderStream();
     const finisher = output['finisher'] || output;
     finisher.on('finish', resolve);
     finisher.on('error', reject);
-    input.pipe(output);
+    input.pipe(ensureHeader);
+    ensureHeader.pipe(output);
   });
 }
 
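Every copy pipeline now passes through EnsureStreamHeaderStream (the new file added below), so a reader that emits no header row, such as a MongoDB collection reader producing plain documents, still delivers a header to the writer. A hedged usage sketch with stand-in streams invented for illustration:

    const stream = require('stream');
    // stand-in reader: object stream with no header row
    const input = stream.Readable.from([{ id: 1 }, { id: 2 }]);
    // stand-in writer: logs whatever reaches it
    const output = new stream.Writable({
      objectMode: true,
      write(chunk, encoding, callback) { console.log(chunk); callback(); },
    });
    copyStream(input, output).then(() => console.log('done'));
    // the first logged chunk is the synthesized header:
    // { __isStreamHeader: true, __isComputedStructure: true, columns: [{ columnName: 'id' }] }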
@@ -3,9 +3,8 @@ const stream = require('stream');
 const byline = require('byline');
 
 class ParseStream extends stream.Transform {
-  constructor({ header, limitRows }) {
+  constructor({ limitRows }) {
     super({ objectMode: true });
-    this.header = header;
     this.wasHeader = false;
     this.limitRows = limitRows;
     this.rowsWritten = 0;
@@ -13,7 +12,14 @@ class ParseStream extends stream.Transform {
   _transform(chunk, encoding, done) {
     const obj = JSON.parse(chunk);
     if (!this.wasHeader) {
-      if (!this.header) this.push({ columns: Object.keys(obj).map(columnName => ({ columnName })) });
+      if (
+        !obj.__isStreamHeader &&
+        // TODO remove isArray test
+        !Array.isArray(obj.columns)
+      ) {
+        this.push({ columns: Object.keys(obj).map(columnName => ({ columnName })) });
+      }
+
       this.wasHeader = true;
     }
     if (!this.limitRows || this.rowsWritten < this.limitRows) {
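ParseStream no longer needs a header option: it inspects the first parsed line and synthesizes a header only when that line is neither flagged with __isStreamHeader nor shaped like a legacy columns array (the isArray test is kept for backward compatibility, per the TODO). A small sketch of the decision on the first line, using an illustrative data row:

    const firstLine = '{"id":1,"name":"x"}';
    const obj = JSON.parse(firstLine);
    if (!obj.__isStreamHeader && !Array.isArray(obj.columns)) {
      // ParseStream pushes this synthesized header before the data row itself
      console.log({ columns: Object.keys(obj).map(columnName => ({ columnName })) });
    }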
@@ -24,12 +30,12 @@ class ParseStream extends stream.Transform {
   }
 }
 
-async function jsonLinesReader({ fileName, encoding = 'utf-8', header = true, limitRows = undefined }) {
+async function jsonLinesReader({ fileName, encoding = 'utf-8', limitRows = undefined }) {
   console.log(`Reading file ${fileName}`);
 
   const fileStream = fs.createReadStream(fileName, encoding);
   const liner = byline(fileStream);
-  const parser = new ParseStream({ header, limitRows });
+  const parser = new ParseStream({ limitRows });
   liner.pipe(parser);
   return parser;
 }
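Call sites change accordingly: the header option disappears from jsonLinesReader, since each file now declares its own header via the flag. A hedged before/after sketch (file name illustrative, inside an async context):

    // before: the caller had to declare whether the file started with a header
    // const reader = await jsonLinesReader({ fileName: 'data.jsonl', header: true });
    // after: the first line is classified by its __isStreamHeader flag
    const reader = await jsonLinesReader({ fileName: 'data.jsonl', limitRows: 100 });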
@@ -8,10 +8,16 @@ class StringifyStream extends stream.Transform {
     this.wasHeader = false;
   }
   _transform(chunk, encoding, done) {
+    let skip = false;
     if (!this.wasHeader) {
-      if (this.header) this.push(JSON.stringify(chunk) + '\n');
+      skip =
+        (chunk.__isStreamHeader ||
+          // TODO remove isArray test
+          Array.isArray(chunk.columns)) &&
+        !this.header;
       this.wasHeader = true;
-    } else {
+    }
+    if (!skip) {
       this.push(JSON.stringify(chunk) + '\n');
     }
     done();
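StringifyStream mirrors the reader: rather than writing the first chunk only when this.header is set, it now writes every chunk except a recognized header chunk arriving when the output format wants no header. A tiny sketch of the skip rule:

    function shouldSkip(chunk, header) {
      return (chunk.__isStreamHeader || Array.isArray(chunk.columns)) && !header;
    }
    console.log(shouldSkip({ __isStreamHeader: true, columns: [] }, false)); // true: header dropped
    console.log(shouldSkip({ id: 1 }, false)); // false: data rows are always written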
@@ -1,6 +1,5 @@
 const { quoteFullName, fullNameToString } = require('dbgate-tools');
 const requireEngineDriver = require('../utility/requireEngineDriver');
-const { decryptConnection } = require('../utility/crypting');
 const connectUtility = require('../utility/connectUtility');
 
 async function tableReader({ connection, pureName, schemaName }) {
@@ -10,6 +9,13 @@ async function tableReader({ connection, pureName, schemaName }) {
 
   const fullName = { pureName, schemaName };
 
+  if (driver.dialect.nosql) {
+    // @ts-ignore
+    console.log(`Reading collection ${fullNameToString(fullName)}`);
+    // @ts-ignore
+    return await driver.readQuery(pool, JSON.stringify(fullName));
+  }
+
   const table = await driver.analyseSingleObject(pool, fullName, 'tables');
   const query = `select * from ${quoteFullName(driver.dialect, fullName)}`;
   if (table) {
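For NoSQL dialects tableReader skips SQL analysis entirely and hands the serialized collection name to the driver's readQuery, which is what enables export from MongoDB in this commit. A sketch of what the driver receives, with an illustrative collection name:

    const fullName = { pureName: 'customers', schemaName: undefined };
    // driver.readQuery(pool, JSON.stringify(fullName)) receives:
    console.log(JSON.stringify(fullName)); // {"pureName":"customers"}
    // and is expected to return an object-mode stream of documents, which
    // EnsureStreamHeaderStream later prefixes with a computed header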
35  packages/api/src/utility/EnsureStreamHeaderStream.js  Normal file
@@ -0,0 +1,35 @@
+const stream = require('stream');
+
+class EnsureStreamHeaderStream extends stream.Transform {
+  constructor() {
+    super({ objectMode: true });
+    this.wasHeader = false;
+  }
+  _transform(chunk, encoding, done) {
+    if (!this.wasHeader) {
+      if (chunk.__isDynamicStructure) {
+        // ignore dynamic structure header
+        done();
+        return;
+      }
+
+      if (
+        !chunk.__isStreamHeader &&
+        // TODO remove isArray test
+        !Array.isArray(chunk.columns)
+      ) {
+        this.push({
+          __isStreamHeader: true,
+          __isComputedStructure: true,
+          columns: Object.keys(chunk).map(columnName => ({ columnName })),
+        });
+      }
+
+      this.wasHeader = true;
+    }
+    this.push(chunk);
+    done();
+  }
+}
+
+module.exports = EnsureStreamHeaderStream;
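A minimal standalone exercise of the new class, assuming only Node's stream module; the documents are invented for illustration:

    const { Readable, Writable } = require('stream');
    const EnsureStreamHeaderStream = require('./EnsureStreamHeaderStream');

    const docs = Readable.from([{ _id: 1, name: 'a' }, { _id: 2, name: 'b' }]);
    const sink = new Writable({
      objectMode: true,
      write(chunk, encoding, callback) { console.log(chunk); callback(); },
    });
    docs.pipe(new EnsureStreamHeaderStream()).pipe(sink);
    // logs the synthesized header first:
    // { __isStreamHeader: true, __isComputedStructure: true,
    //   columns: [{ columnName: '_id' }, { columnName: 'name' }] }
    // then the two documents unchanged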
@@ -202,6 +202,10 @@
       },
     },
   },
+  {
+    label: 'Export',
+    isExport: true,
+  },
 ],
};
 
@@ -15,7 +15,11 @@
   const { values, setFieldValue } = getFormContext();
   $: dbinfo = useDatabaseInfo({ conid: $values[conidName], database: $values[databaseName] });
 
-  $: tablesOptions = [...(($dbinfo && $dbinfo.tables) || []), ...(($dbinfo && $dbinfo.views) || [])]
+  $: tablesOptions = [
+    ...(($dbinfo && $dbinfo.tables) || []),
+    ...(($dbinfo && $dbinfo.views) || []),
+    ...(($dbinfo && $dbinfo.collections) || []),
+  ]
     .filter(x => !$values[schemaName] || x.schemaName == $values[schemaName])
     .map(x => ({
       value: x.pureName,
@@ -87,7 +87,7 @@
         schemaName={schemaNameField}
         databaseName={databaseNameField}
         name={tablesField}
-        label="Tables / views"
+        label="Tables / views / collections"
       />
     {/if}
   {/if}