stream header flag + export from mongo

Jan Prochazka 2021-04-08 17:49:57 +02:00
parent a5d37eb528
commit 20fccf51d9
9 changed files with 83 additions and 12 deletions

View File

@@ -22,7 +22,13 @@ class TableWriter {
     this.currentFile = path.join(jsldir(), `${this.jslid}.jsonl`);
     this.currentRowCount = 0;
     this.currentChangeIndex = 1;
-    fs.writeFileSync(this.currentFile, JSON.stringify(structure) + '\n');
+    fs.writeFileSync(
+      this.currentFile,
+      JSON.stringify({
+        ...structure,
+        __isStreamHeader: true,
+      }) + '\n'
+    );
     this.currentStream = fs.createWriteStream(this.currentFile, { flags: 'a' });
     this.writeCurrentStats(false, false);
     this.resultIndex = resultIndex;
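
With this change the first line of each .jsonl cache file is self-describing. As a sketch, for a hypothetical structure of { columns: [{ columnName: 'name' }] }, the file would now begin:

{"columns":[{"columnName":"name"}],"__isStreamHeader":true}
{"name":"first row"}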

View File

@@ -1,9 +1,13 @@
+const EnsureStreamHeaderStream = require('../utility/EnsureStreamHeaderStream');
+
 function copyStream(input, output) {
   return new Promise((resolve, reject) => {
+    const ensureHeader = new EnsureStreamHeaderStream();
     const finisher = output['finisher'] || output;
     finisher.on('finish', resolve);
     finisher.on('error', reject);
-    input.pipe(output);
+    input.pipe(ensureHeader);
+    ensureHeader.pipe(output);
   });
 }
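
For reference, a minimal usage sketch of the new pipeline; jsonLinesWriter is assumed here as the writing counterpart of the reader shown below:

// Copy a JSONL source to a JSONL target; the interposed
// EnsureStreamHeaderStream guarantees the output starts with a header object.
const input = await jsonLinesReader({ fileName: 'in.jsonl' });
const output = await jsonLinesWriter({ fileName: 'out.jsonl' });
await copyStream(input, output);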

View File

@@ -3,9 +3,8 @@ const stream = require('stream');
 const byline = require('byline');
 
 class ParseStream extends stream.Transform {
-  constructor({ header, limitRows }) {
+  constructor({ limitRows }) {
     super({ objectMode: true });
-    this.header = header;
     this.wasHeader = false;
     this.limitRows = limitRows;
     this.rowsWritten = 0;
@@ -13,7 +12,14 @@ class ParseStream extends stream.Transform {
   _transform(chunk, encoding, done) {
     const obj = JSON.parse(chunk);
     if (!this.wasHeader) {
-      if (!this.header) this.push({ columns: Object.keys(obj).map(columnName => ({ columnName })) });
+      if (
+        !obj.__isStreamHeader &&
+        // TODO remove isArray test
+        !Array.isArray(obj.columns)
+      ) {
+        this.push({ columns: Object.keys(obj).map(columnName => ({ columnName })) });
+      }
       this.wasHeader = true;
     }
     if (!this.limitRows || this.rowsWritten < this.limitRows) {
@@ -24,12 +30,12 @@
   }
 }
 
-async function jsonLinesReader({ fileName, encoding = 'utf-8', header = true, limitRows = undefined }) {
+async function jsonLinesReader({ fileName, encoding = 'utf-8', limitRows = undefined }) {
   console.log(`Reading file ${fileName}`);
   const fileStream = fs.createReadStream(fileName, encoding);
   const liner = byline(fileStream);
-  const parser = new ParseStream({ header, limitRows });
+  const parser = new ParseStream({ limitRows });
   liner.pipe(parser);
   return parser;
 }
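
With the header flag stored in the file, the reader no longer needs a header option; it inspects the first parsed line instead. A sketch of the two cases handled above:

// First line is a stored header: passed through unchanged.
//   {"__isStreamHeader":true,"columns":[{"columnName":"id"}]}
// First line is a plain data row: a computed header is pushed first.
//   {"id":1}  =>  { columns: [{ columnName: 'id' }] }, then { id: 1 }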

View File

@@ -8,10 +8,16 @@ class StringifyStream extends stream.Transform {
     this.wasHeader = false;
   }
   _transform(chunk, encoding, done) {
+    let skip = false;
     if (!this.wasHeader) {
-      if (this.header) this.push(JSON.stringify(chunk) + '\n');
+      skip =
+        (chunk.__isStreamHeader ||
+          // TODO remove isArray test
+          Array.isArray(chunk.columns)) &&
+        !this.header;
       this.wasHeader = true;
-    } else {
+    }
+    if (!skip) {
       this.push(JSON.stringify(chunk) + '\n');
     }
     done();
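
Note the behavioral fix: previously the first chunk was always treated as a header and dropped when this.header was false, even if it was a data row. Now only chunks that actually look like a header (the __isStreamHeader flag or a columns array) can be skipped, and only when the writer was configured without a header:

// Assuming the writer factory passes header through (as the reader used to):
// jsonLinesWriter({ fileName, header: true })  -> header line written
// jsonLinesWriter({ fileName, header: false }) -> header line skipped, rows only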

View File

@@ -1,6 +1,5 @@
 const { quoteFullName, fullNameToString } = require('dbgate-tools');
 const requireEngineDriver = require('../utility/requireEngineDriver');
-const { decryptConnection } = require('../utility/crypting');
 const connectUtility = require('../utility/connectUtility');
 
 async function tableReader({ connection, pureName, schemaName }) {
@@ -10,6 +9,13 @@ async function tableReader({ connection, pureName, schemaName }) {
   const fullName = { pureName, schemaName };
 
+  if (driver.dialect.nosql) {
+    // @ts-ignore
+    console.log(`Reading collection ${fullNameToString(fullName)}`);
+    // @ts-ignore
+    return await driver.readQuery(pool, JSON.stringify(fullName));
+  }
+
   const table = await driver.analyseSingleObject(pool, fullName, 'tables');
   const query = `select * from ${quoteFullName(driver.dialect, fullName)}`;
 
   if (table) {
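
For NoSQL dialects the reader now bypasses SQL analysis entirely and hands the serialized table name to the driver as the "query". A sketch of what the Mongo driver would receive, for a hypothetical collection named customers (JSON.stringify drops the undefined schemaName):

// await driver.readQuery(pool, '{"pureName":"customers"}');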

View File

@@ -0,0 +1,35 @@
+const stream = require('stream');
+
+class EnsureStreamHeaderStream extends stream.Transform {
+  constructor() {
+    super({ objectMode: true });
+    this.wasHeader = false;
+  }
+  _transform(chunk, encoding, done) {
+    if (!this.wasHeader) {
+      if (chunk.__isDynamicStructure) {
+        // ignore dynamic structure header
+        done();
+        return;
+      }
+      if (
+        !chunk.__isStreamHeader &&
+        // TODO remove isArray test
+        !Array.isArray(chunk.columns)
+      ) {
+        this.push({
+          __isStreamHeader: true,
+          __isComputedStructure: true,
+          columns: Object.keys(chunk).map(columnName => ({ columnName })),
+        });
+      }
+      this.wasHeader = true;
+    }
+    this.push(chunk);
+    done();
+  }
+}
+
+module.exports = EnsureStreamHeaderStream;
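
A self-contained sketch of the new transform's behavior (Readable.from needs Node 12+; the data and the require path are hypothetical):

const { Readable } = require('stream');
const EnsureStreamHeaderStream = require('./EnsureStreamHeaderStream');

// A headerless object source: the transform synthesizes the header object.
const source = Readable.from([{ id: 1 }, { id: 2 }]);
source.pipe(new EnsureStreamHeaderStream()).on('data', console.log);
// -> { __isStreamHeader: true, __isComputedStructure: true, columns: [{ columnName: 'id' }] }
// -> { id: 1 }
// -> { id: 2 }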

View File

@@ -202,6 +202,10 @@
       },
     },
   },
+  {
+    label: 'Export',
+    isExport: true,
+  },
 ],
 };

View File

@@ -15,7 +15,11 @@
   const { values, setFieldValue } = getFormContext();
 
   $: dbinfo = useDatabaseInfo({ conid: $values[conidName], database: $values[databaseName] });
-  $: tablesOptions = [...(($dbinfo && $dbinfo.tables) || []), ...(($dbinfo && $dbinfo.views) || [])]
+  $: tablesOptions = [
+    ...(($dbinfo && $dbinfo.tables) || []),
+    ...(($dbinfo && $dbinfo.views) || []),
+    ...(($dbinfo && $dbinfo.collections) || []),
+  ]
     .filter(x => !$values[schemaName] || x.schemaName == $values[schemaName])
     .map(x => ({
       value: x.pureName,

View File

@ -87,7 +87,7 @@
schemaName={schemaNameField} schemaName={schemaNameField}
databaseName={databaseNameField} databaseName={databaseNameField}
name={tablesField} name={tablesField}
label="Tables / views" label="Tables / views / collections"
/> />
{/if} {/if}
{/if} {/if}