JSON import rootField support
Some checks are pending
Run tests / test-runner (push) Waiting to run

This commit is contained in:
Jan Prochazka 2024-09-17 13:23:51 +02:00
parent fd8a28831e
commit 56f015ffd5
2 changed files with 47 additions and 8 deletions

View File

@ -120,3 +120,32 @@ test('JSON object import test', async () => {
{ mykey: 'k2', id: 2, val: 'v2' }, { mykey: 'k2', id: 2, val: 'v2' },
]); ]);
}); });
test('JSON filtered object import test', async () => {
  // Fixture: the rows live under the "filtered" root field, not at the top level.
  const jsonFileName = tmp.tmpNameSync();
  const payload = {
    filtered: {
      k1: { id: 1, val: 'v1' },
      k2: { id: 2, val: 'v2' },
    },
  };
  fs.writeFileSync(jsonFileName, JSON.stringify(payload));

  // rootField tells the reader to descend into "filtered" before streaming
  // the object's keys; keyField maps each key into the emitted row.
  const reader = await dbgateApi.jsonReader({
    fileName: jsonFileName,
    jsonStyle: 'object',
    keyField: 'mykey',
    rootField: 'filtered',
  });
  const rows = await getReaderRows(reader);

  expect(rows.length).toEqual(2);
  expect(rows).toEqual([
    { mykey: 'k1', id: 1, val: 'v1' },
    { mykey: 'k2', id: 2, val: 'v2' },
  ]);
});

View File

@ -2,12 +2,13 @@ const fs = require('fs');
const stream = require('stream'); const stream = require('stream');
const byline = require('byline'); const byline = require('byline');
const { getLogger } = require('dbgate-tools'); const { getLogger } = require('dbgate-tools');
const logger = getLogger('jsonReader');
const { parser } = require('stream-json'); const { parser } = require('stream-json');
const { pick } = require('stream-json/filters/Pick'); const { pick } = require('stream-json/filters/Pick');
const { streamArray } = require('stream-json/streamers/StreamArray'); const { streamArray } = require('stream-json/streamers/StreamArray');
const { streamObject } = require('stream-json/streamers/StreamObject'); const { streamObject } = require('stream-json/streamers/StreamObject');
const logger = getLogger('jsonReader');
class ParseStream extends stream.Transform { class ParseStream extends stream.Transform {
constructor({ limitRows, jsonStyle, keyField }) { constructor({ limitRows, jsonStyle, keyField }) {
super({ objectMode: true }); super({ objectMode: true });
@ -42,7 +43,14 @@ class ParseStream extends stream.Transform {
} }
} }
async function jsonReader({ fileName, jsonStyle, keyField = '_key', encoding = 'utf-8', limitRows = undefined }) { async function jsonReader({
fileName,
jsonStyle,
keyField = '_key',
rootField = null,
encoding = 'utf-8',
limitRows = undefined,
}) {
logger.info(`Reading file ${fileName}`); logger.info(`Reading file ${fileName}`);
const fileStream = fs.createReadStream( const fileStream = fs.createReadStream(
@ -55,16 +63,18 @@ async function jsonReader({ fileName, jsonStyle, keyField = '_key', encoding = '
const parseStream = new ParseStream({ limitRows, jsonStyle, keyField }); const parseStream = new ParseStream({ limitRows, jsonStyle, keyField });
if (jsonStyle === 'object') { const tramsformer = jsonStyle === 'object' ? streamObject() : streamArray();
const tramsformer = streamObject();
parseJsonStream.pipe(tramsformer); if (rootField) {
tramsformer.pipe(parseStream); const filterStream = pick({ filter: rootField });
parseJsonStream.pipe(filterStream);
filterStream.pipe(tramsformer);
} else { } else {
const tramsformer = streamArray();
parseJsonStream.pipe(tramsformer); parseJsonStream.pipe(tramsformer);
tramsformer.pipe(parseStream);
} }
tramsformer.pipe(parseStream);
return parseStream; return parseStream;
} }