Base Space export (#3479)
Kong/insomnia, commit 9c0f7660dc (parent c24d08ad66)
Export test suite (exportWorkspacesHAR / exportWorkspacesData):

@@ -58,7 +58,7 @@ describe('exportWorkspacesHAR() and exportRequestsHAR()', () => {
     });
     const includePrivateDocs = true;
     // Test export whole workspace.
-    const exportWorkspacesJson = await exportWorkspacesHAR(wrk1, includePrivateDocs);
+    const exportWorkspacesJson = await exportWorkspacesHAR([wrk1], includePrivateDocs);
     const exportWorkspacesData = JSON.parse(exportWorkspacesJson);
     expect(exportWorkspacesData).toMatchObject({
       log: {
@@ -171,7 +171,7 @@ describe('exportWorkspacesHAR() and exportRequestsHAR()', () => {
       activeEnvironmentId: env2Private._id,
     });
     const includePrivateDocs = false;
-    const json = await exportWorkspacesHAR(null, includePrivateDocs);
+    const json = await exportWorkspacesHAR([], includePrivateDocs);
     const data = JSON.parse(json);
     expect(data).toMatchObject({
       log: {
@@ -259,8 +259,8 @@ describe('export', () => {
       parentId: eBase._id,
     });
     // Test export whole workspace.
-    const exportedWorkspacesJson = await exportWorkspacesData(null, false, 'json');
-    const exportedWorkspacesYaml = await exportWorkspacesData(null, false, 'yaml');
+    const exportedWorkspacesJson = await exportWorkspacesData([], false, 'json');
+    const exportedWorkspacesYaml = await exportWorkspacesData([], false, 'yaml');
     const exportWorkspacesDataJson = JSON.parse(exportedWorkspacesJson);
     const exportWorkspacesDataYaml = YAML.parse(exportedWorkspacesYaml);
     // Ensure JSON is the same as YAML
@@ -417,7 +417,7 @@ describe('export', () => {
       isPrivate: true,
       parentId: eBase._id,
     });
-    const result = await exportWorkspacesData(w, false, 'json');
+    const result = await exportWorkspacesData([w], false, 'json');
     expect(JSON.parse(result)).toEqual({
       _type: 'export',
       __export_format: 4,
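The test updates above switch every call site to the new array-based signatures. A minimal sketch of the three call shapes; the import paths and the `wrk1` fixture are placeholders for whatever the real suite defines, only the signatures mirror this commit:

// Sketch only: './export' and './models/workspace' are placeholder paths.
import { exportWorkspacesHAR, exportWorkspacesData } from './export';
import type { Workspace } from './models/workspace';

async function demoExportCalls(wrk1: Workspace) {
  // Export one workspace as HAR: wrap it in an array.
  const singleHar = await exportWorkspacesHAR([wrk1], true /* includePrivateDocs */);

  // Export everything as HAR: an empty array means "no parent filter".
  const allHar = await exportWorkspacesHAR([], false);

  // Insomnia-format export of everything, as JSON or YAML.
  const allJson = await exportWorkspacesData([], false, 'json');
  const allYaml = await exportWorkspacesData([], false, 'yaml');

  return { singleHar, allHar, allJson, allYaml };
}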
Export module (exportWorkspacesHAR / exportWorkspacesData / exportRequestsData implementation):

@@ -25,7 +25,7 @@ import { isRequest } from '../models/request';
 import { isRequestGroup } from '../models/request-group';
 import { isProtoDirectory } from '../models/proto-directory';
 import { isProtoFile } from '../models/proto-file';
-import { isWorkspace } from '../models/workspace';
+import { isWorkspace, Workspace } from '../models/workspace';
 import { isApiSpec } from '../models/api-spec';
 import { isCookieJar } from '../models/cookie-jar';
 import { isEnvironment } from '../models/environment';
@@ -34,22 +34,22 @@ import { isUnitTest } from '../models/unit-test';

 const EXPORT_FORMAT = 4;

-async function getDocWithDescendants(
-  parentDoc: BaseModel | null = null,
-  includePrivateDocs = false,
-) {
+const getDocWithDescendants = (includePrivateDocs = false) => async (parentDoc: BaseModel | null) => {
   const docs = await db.withDescendants(parentDoc);
   return docs.filter(
     // Don't include if private, except if we want to
     doc => !doc?.isPrivate || includePrivateDocs,
   );
-}
+};

 export async function exportWorkspacesHAR(
-  model: BaseModel | null = null,
+  workspaces: Workspace[],
   includePrivateDocs = false,
 ) {
-  const docs = await getDocWithDescendants(model, includePrivateDocs);
+  // regarding `[null]`, see the comment here in `exportWorkspacesData`
+  const rootDocs = workspaces.length === 0 ? [null] : workspaces;
+  const promises = rootDocs.map(getDocWithDescendants(includePrivateDocs));
+  const docs = (await Promise.all(promises)).flat();
   const requests = docs.filter(isRequest);
   return exportRequestsHAR(requests, includePrivateDocs);
 }
@@ -118,11 +118,14 @@ export async function exportRequestsHAR(
 }

 export async function exportWorkspacesData(
-  parentDoc: BaseModel | null,
+  workspaces: Workspace[],
   includePrivateDocs: boolean,
   format: 'json' | 'yaml',
 ) {
-  const docs = await getDocWithDescendants(parentDoc, includePrivateDocs);
+  // Semantically, if an empty array is passed, then nothing will be returned. What an empty array really signifies is "no parent", which, at the database layer is the same as "parentId === null", hence we add null in ourselves.
+  const rootDocs = workspaces.length === 0 ? [null] : workspaces;
+  const promises = rootDocs.map(getDocWithDescendants(includePrivateDocs));
+  const docs = (await Promise.all(promises)).flat();
   const requests = docs.filter(doc => isRequest(doc) || isGrpcRequest(doc));
   return exportRequestsData(requests, includePrivateDocs, format);
 }
@@ -141,11 +144,11 @@ export async function exportRequestsData(
     resources: [],
   };
   const docs: BaseModel[] = [];
-  const workspaces: BaseModel[] = [];
-  const mapTypeAndIdToDoc: Record<string, any> = {};
+  const workspaces: Workspace[] = [];
+  const mapTypeAndIdToDoc: Record<string, BaseModel> = {};

-  for (const req of requests) {
-    const ancestors: BaseModel[] = clone(await db.withAncestors(req));
+  for (const request of requests) {
+    const ancestors = clone<BaseModel[]>(await db.withAncestors(request));

     for (const ancestor of ancestors) {
       const key = ancestor.type + '___' + ancestor._id;
@@ -164,7 +167,7 @@ export async function exportRequestsData(
   }

   for (const workspace of workspaces) {
-    const descendants: BaseModel[] = (await db.withDescendants(workspace)).filter(d => {
+    const descendants = (await db.withDescendants(workspace)).filter(d => {
       // Only interested in these additional model types.
       return (
         isCookieJar(d) ||
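The core of the refactor above is that `getDocWithDescendants` becomes a curried helper that can be mapped over a list of root documents, with an empty workspace list standing in for the single root `null`. A self-contained sketch of that pattern, assuming a stubbed document store in place of the real `db` module:

// Standalone illustration of the curried-collector pattern; `Doc` and
// `fakeDb` are stand-ins for BaseModel and Insomnia's db module.
interface Doc {
  _id: string;
  parentId: string | null;
  isPrivate?: boolean;
}

const fakeDb = {
  docs: [
    { _id: 'wrk_1', parentId: null },
    { _id: 'req_1', parentId: 'wrk_1', isPrivate: false },
    { _id: 'env_private', parentId: 'wrk_1', isPrivate: true },
  ] as Doc[],

  // Rough stand-in for db.withDescendants(): a null parent means
  // "start from every root document".
  async withDescendants(parentDoc: Doc | null): Promise<Doc[]> {
    if (parentDoc === null) {
      return this.docs;
    }
    return this.docs.filter(d => d._id === parentDoc._id || d.parentId === parentDoc._id);
  },
};

// Curried, like the new getDocWithDescendants: fix the privacy flag once,
// then map the resulting async function over each root document.
const getDocWithDescendants = (includePrivateDocs = false) => async (parentDoc: Doc | null) => {
  const docs = await fakeDb.withDescendants(parentDoc);
  return docs.filter(doc => !doc.isPrivate || includePrivateDocs);
};

async function collect(workspaces: Doc[], includePrivateDocs: boolean): Promise<Doc[]> {
  // Empty array means "no parent", which the database layer treats as parentId === null.
  const rootDocs: (Doc | null)[] = workspaces.length === 0 ? [null] : workspaces;
  const promises = rootDocs.map(getDocWithDescendants(includePrivateDocs));
  return (await Promise.all(promises)).flat();
}

// collect([], false)                                 -> every non-private doc
// collect([{ _id: 'wrk_1', parentId: null }], true)  -> wrk_1 and all its descendants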
Plugin data context tests (app.import.* / app.export.*):

@@ -1,5 +1,4 @@
 import * as plugin from '../data';
-import * as modals from '../../../ui/components/modals';
 import path from 'path';
 import { globalBeforeEach } from '../../../__jest__/before-each';
 import * as models from '../../../models/index';
@@ -8,21 +7,13 @@ import fs from 'fs';
 import { getAppVersion } from '../../../common/constants';
 import { WorkspaceScopeKeys } from '../../../models/workspace';

-const PLUGIN = {
-  name: 'my-plugin',
-  version: '1.0.0',
-  directory: '/plugins/my-plugin',
-  module: {},
-};
+jest.mock('../../../ui/components/modals');

 describe('init()', () => {
   beforeEach(globalBeforeEach);

   it('initializes correctly', async () => {
-    // @ts-expect-error -- TSCONVERSION genuine, plugin.init doesn't take any arguments
-    const { data } = plugin.init({
-      name: PLUGIN,
-    });
+    const { data } = plugin.init();
     expect(Object.keys(data)).toEqual(['import', 'export']);
     expect(Object.keys(data.export).sort()).toEqual(['har', 'insomnia']);
     expect(Object.keys(data.import).sort()).toEqual(['raw', 'uri']);
@@ -40,13 +31,10 @@ describe('app.import.*', () => {
   });

   it('uri', async () => {
-    // @ts-expect-error -- TSCONVERSION mocking with jest function
-    modals.showModal = jest.fn();
     const workspace = await models.workspace.getById('wrk_1');
     expect(await db.all(models.workspace.type)).toEqual([workspace]);
     expect(await db.count(models.request.type)).toBe(0);
-    // @ts-expect-error -- TSCONVERSION genuine, plugin.init doesn't take any arguments
-    const { data } = plugin.init(PLUGIN);
+    const { data } = plugin.init();
     const filename = path.resolve(__dirname, '../__fixtures__/basic-import.json');
     await data.import.uri(`file://${filename}`);
     const allWorkspaces = await db.all(models.workspace.type);
@@ -91,13 +79,10 @@ describe('app.import.*', () => {
   });

   it('importRaw', async () => {
-    // @ts-expect-error -- TSCONVERSION mocking with jest function
-    modals.showModal = jest.fn();
     const workspace = await models.workspace.getById('wrk_1');
     expect(await db.all(models.workspace.type)).toEqual([workspace]);
     expect(await db.count(models.request.type)).toBe(0);
-    // @ts-expect-error -- TSCONVERSION genuine, plugin.init doesn't take any arguments
-    const { data } = plugin.init(PLUGIN);
+    const { data } = plugin.init();
     const filename = path.resolve(__dirname, '../__fixtures__/basic-import.json');
     await data.import.raw(fs.readFileSync(filename, 'utf8'));
     const allWorkspaces = await db.all(models.workspace.type);
@@ -167,10 +152,7 @@ describe('app.export.*', () => {
   });

   it('insomnia', async () => {
-    // @ts-expect-error -- TSCONVERSION mocking with jest function
-    modals.showModal = jest.fn();
-    // @ts-expect-error -- TSCONVERSION genuine, plugin.init doesn't take any arguments
-    const { data } = plugin.init(PLUGIN);
+    const { data } = plugin.init();
     const exported = await data.export.insomnia();
     const exportedData = JSON.parse(exported);
     expect(typeof exportedData.__export_date).toBe('string');
@@ -219,10 +201,7 @@ describe('app.export.*', () => {
   });

   it('har', async () => {
-    // @ts-expect-error -- TSCONVERSION mocking with jest function
-    modals.showModal = jest.fn();
-    // @ts-expect-error -- TSCONVERSION genuine, plugin.init doesn't take any arguments
-    const { data } = plugin.init(PLUGIN);
+    const { data } = plugin.init();
     const exported = await data.export.har();
     const exportedData = JSON.parse(exported);
     exportedData.log.entries[0].startedDateTime = '2017-11-24T18:12:12.849Z';
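The test changes above drop the `const PLUGIN = {...}` fixture, stop passing it to `plugin.init()`, and replace the per-test `modals.showModal = jest.fn()` assignments with a single module-level `jest.mock(...)`. A minimal Jest sketch of that mocking pattern; the `./modals` and `./importer` modules here are hypothetical stand-ins for the real UI modal and import modules:

// Hypothetical modules for illustration; the real suite mocks
// '../../../ui/components/modals'.
import * as modals from './modals';
import { importFromUri } from './importer';

// Auto-mock: every export of './modals' becomes a jest.fn() for this whole
// test file, so individual tests no longer assign jest.fn() by hand.
jest.mock('./modals');

describe('importFromUri()', () => {
  it('surfaces progress through the modal', async () => {
    await importFromUri('file:///tmp/basic-import.json');
    expect(modals.showModal).toHaveBeenCalled();
  });
});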
Plugin data context (insomnia / HAR export bindings):

@@ -9,7 +9,7 @@ interface PluginImportOptions {
 }

 interface InsomniaExport {
-  workspace?: Workspace | null;
+  workspace?: Workspace;
   includePrivate?: boolean;
   format?: 'json' | 'yaml';
 }
@@ -39,7 +39,7 @@ export const init = () => ({
      includePrivate,
      format,
    }: InsomniaExport = {}) => exportWorkspacesData(
-      workspace || null,
+      workspace ? [workspace] : [],
      Boolean(includePrivate),
      format || 'json',
    ),
@@ -48,7 +48,7 @@ export const init = () => ({
      workspace,
      includePrivate,
    }: HarExport = {}) => exportWorkspacesHAR(
-      workspace || null,
+      workspace ? [workspace] : [],
      Boolean(includePrivate),
    ),
  },
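The plugin bindings above adapt the optional single `workspace` option to the new array parameter with `workspace ? [workspace] : []`, so an omitted workspace becomes an empty array (export everything) rather than `null`. The same idea as a tiny standalone helper; the helper name is invented, the plugin code simply inlines the ternary:

// Hypothetical helper: wrap an optional workspace into the list shape the
// new export functions expect. Absent input becomes [], which the export
// layer reads as "no parent filter" (export everything).
const toWorkspaceList = <T>(workspace?: T | null): T[] =>
  workspace ? [workspace] : [];

// toWorkspaceList(myWorkspace) -> [myWorkspace]
// toWorkspaceList(undefined)   -> []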
Redux global module (exportAllToFile):

@@ -47,7 +47,7 @@ import {
   DEPRECATED_ACTIVITY_INSOMNIA,
   isValidActivity,
 } from '../../../common/constants';
-import { selectSettings } from '../selectors';
+import { selectSettings, selectWorkspacesForActiveSpace } from '../selectors';
 import { getDesignerDataDir } from '../../../common/electron-helpers';
 import { Settings } from '../../../models/settings';
 import { GrpcRequest } from '../../../models/grpc-request';
@@ -595,11 +595,12 @@ const writeExportedFileToFileSystem = (filename: string, jsonData: string, onDone
   fs.writeFile(filename, jsonData, {}, onDone);
 };

-export const exportAllToFile = () => async (dispatch: Dispatch) => {
+export const exportAllToFile = () => async (dispatch: Dispatch, getState) => {
   dispatch(loadStart());
   showSelectExportTypeModal({
     onCancel: () => { dispatch(loadStop()); },
     onDone: async selectedFormat => {
+      const state = getState();
       // Check if we want to export private environments.
       const environments = await models.environment.all();

@@ -622,20 +623,22 @@ export const exportAllToFile = () => async (dispatch: Dispatch) => {
         return;
       }

+      const workspaces = selectWorkspacesForActiveSpace(state);
+
       let stringifiedExport;

       try {
         switch (selectedFormat) {
           case VALUE_HAR:
-            stringifiedExport = await exportWorkspacesHAR(null, exportPrivateEnvironments);
+            stringifiedExport = await exportWorkspacesHAR(workspaces, exportPrivateEnvironments);
             break;

           case VALUE_YAML:
-            stringifiedExport = await exportWorkspacesData(null, exportPrivateEnvironments, 'yaml');
+            stringifiedExport = await exportWorkspacesData(workspaces, exportPrivateEnvironments, 'yaml');
             break;

           case VALUE_JSON:
-            stringifiedExport = await exportWorkspacesData(null, exportPrivateEnvironments, 'json');
+            stringifiedExport = await exportWorkspacesData(workspaces, exportPrivateEnvironments, 'json');
             break;
         }
       } catch (err) {
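Finally, `exportAllToFile` above becomes a `getState`-aware thunk so the export is scoped with `selectWorkspacesForActiveSpace` instead of passing `null`. A minimal redux-thunk-style sketch of that shape; the state slice, selector body, and `doExport()` are invented for illustration and do not reflect Insomnia's actual store:

// Minimal sketch of a getState-aware thunk; everything here is a stand-in.
interface Workspace { _id: string; parentId: string | null }

interface RootState {
  entities: { workspaces: Workspace[] };
  activeSpaceId: string | null;
}

// Selector: only workspaces belonging to the active space.
const selectWorkspacesForActiveSpace = (state: RootState): Workspace[] =>
  state.entities.workspaces.filter(w => w.parentId === state.activeSpaceId);

type Dispatch = (action: { type: string; payload?: unknown }) => void;
type GetState = () => RootState;

const doExport = async (workspaces: Workspace[]): Promise<string> =>
  JSON.stringify({ count: workspaces.length });

// The second thunk argument gives access to current state, mirroring
// `exportAllToFile = () => async (dispatch, getState) => { ... }` above.
const exportAllToFile = () => async (dispatch: Dispatch, getState: GetState) => {
  dispatch({ type: 'load/start' });
  const workspaces = selectWorkspacesForActiveSpace(getState());
  const exported = await doExport(workspaces);
  dispatch({ type: 'load/stop' });
  return exported;
};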