// insomnia/packages/insomnia-app/app/common/import.js
// @flow
import { convert } from 'insomnia-importers';
import clone from 'clone';
import * as db from './database';
import * as har from './har';
import type { BaseModel } from '../models/index';
import * as models from '../models/index';
import { getAppVersion } from './constants';
import { showError, showModal } from '../ui/components/modals/index';
import AlertModal from '../ui/components/modals/alert-modal';
import fs from 'fs';
import { fnOrString, generateId } from './misc';
import YAML from 'yaml';
2016-11-19 03:21:15 +00:00
const WORKSPACE_ID_KEY = '__WORKSPACE_ID__';
const BASE_ENVIRONMENT_ID_KEY = '__BASE_ENVIRONMENT_ID__';
const EXPORT_FORMAT = 4;
2016-11-23 03:07:35 +00:00
2016-11-19 03:21:15 +00:00
const EXPORT_TYPE_REQUEST = 'request';
const EXPORT_TYPE_REQUEST_GROUP = 'request_group';
const EXPORT_TYPE_WORKSPACE = 'workspace';
const EXPORT_TYPE_COOKIE_JAR = 'cookie_jar';
const EXPORT_TYPE_ENVIRONMENT = 'environment';
// If we come across an ID of this form, we will replace it with a new one
const REPLACE_ID_REGEX = /__\w+_\d+__/g;
2016-11-19 03:21:15 +00:00
const MODELS = {
[EXPORT_TYPE_REQUEST]: models.request,
[EXPORT_TYPE_REQUEST_GROUP]: models.requestGroup,
[EXPORT_TYPE_WORKSPACE]: models.workspace,
[EXPORT_TYPE_COOKIE_JAR]: models.cookieJar,
[EXPORT_TYPE_ENVIRONMENT]: models.environment,
2016-11-19 03:21:15 +00:00
};
export async function importUri(
getWorkspaceId: () => Promise<string | null>,
uri: string,
): Promise<{
source: string,
error: Error | null,
summary: { [string]: Array<BaseModel> },
}> {
let rawText;
if (uri.match(/^(http|https):\/\//)) {
Version Control (beta) (#1439) * VCS proof of concept underway! * Stuff * Some things * Replace deprecated Electron makeSingleInstance * Rename `window` variables so not to be confused with window object * Don't unnecessarily update request when URL does not change * Regenerate package-lock * Fix tests + ESLint * Publish - insomnia-app@1.0.49 - insomnia-cookies@0.0.12 - insomnia-httpsnippet@1.16.18 - insomnia-importers@2.0.13 - insomnia-libcurl@0.0.23 - insomnia-prettify@0.1.7 - insomnia-url@0.1.6 - insomnia-xpath@1.0.9 - insomnia-plugin-base64@1.0.6 - insomnia-plugin-cookie-jar@1.0.8 - insomnia-plugin-core-themes@1.0.5 - insomnia-plugin-default-headers@1.1.9 - insomnia-plugin-file@1.0.7 - insomnia-plugin-hash@1.0.7 - insomnia-plugin-jsonpath@1.0.12 - insomnia-plugin-now@1.0.11 - insomnia-plugin-os@1.0.13 - insomnia-plugin-prompt@1.1.9 - insomnia-plugin-request@1.0.18 - insomnia-plugin-response@1.0.16 - insomnia-plugin-uuid@1.0.10 * Broken but w/e * Some tweaks * Big refactor. Create local snapshots and push done * POC merging and a lot of improvements * Lots of work done on initial UI/UX * Fix old tests * Atomic writes and size-based batches * Update StageEntry definition once again to be better * Factor out GraphQL query logic * Merge algorithm, history modal, other minor things * Fix test * Merge, checkout, revert w/ user changes now work * Force UI to refresh when switching branches changes active request * Rough draft pull() and some cleanup * E2EE stuff and some refactoring * Add ability to share project with team and fixed tests * VCS now created in root component and better remote project handling * Remove unused definition * Publish - insomnia-account@0.0.2 - insomnia-app@1.1.1 - insomnia-cookies@0.0.14 - insomnia-httpsnippet@1.16.20 - insomnia-importers@2.0.15 - insomnia-libcurl@0.0.25 - insomnia-prettify@0.1.9 - insomnia-sync@0.0.2 - insomnia-url@0.1.8 - insomnia-xpath@1.0.11 - insomnia-plugin-base64@1.0.8 - insomnia-plugin-cookie-jar@1.0.10 - 
insomnia-plugin-core-themes@1.0.7 - insomnia-plugin-file@1.0.9 - insomnia-plugin-hash@1.0.9 - insomnia-plugin-jsonpath@1.0.14 - insomnia-plugin-now@1.0.13 - insomnia-plugin-os@1.0.15 - insomnia-plugin-prompt@1.1.11 - insomnia-plugin-request@1.0.20 - insomnia-plugin-response@1.0.18 - insomnia-plugin-uuid@1.0.12 * Move some deps around * Fix Flow errors * Update package.json * Fix eslint errors * Fix tests * Update deps * bootstrap insomnia-sync * TRy fixing appveyor * Try something else * Bump lerna * try powershell * Try again * Fix imports * Fixed errors * sync types refactor * Show remote projects in workspace dropdown * Improved pulling of non-local workspaces * Loading indicators and some tweaks * Clean up sync staging modal * Some sync improvements: - No longer store stage - Upgrade Electron - Sync UI/UX improvements * Fix snyc tests * Upgraded deps and hot loader tweaks (it's broken for some reason) * Fix tests * Branches dialog, network refactoring, some tweaks * Fixed merging when other branch is empty * A bunch of small fixes from real testing * Fixed pull merge logic * Fix tests * Some bug fixes * A few small tweaks * Conflict resolution and other improvements * Fix tests * Add revert changes * Deal with duplicate projects per workspace * Some tweaks and accessibility improvements * Tooltip accessibility * Fix API endpoint * Fix tests * Remove jest dep from insomnia-importers
2019-04-18 00:50:03 +00:00
const response = await window.fetch(uri);
rawText = await response.text();
} else if (uri.match(/^(file):\/\//)) {
const path = uri.replace(/^(file):\/\//, '');
rawText = fs.readFileSync(path, 'utf8');
} else {
throw new Error(`Invalid import URI ${uri}`);
}
const result = await importRaw(getWorkspaceId, rawText);
2018-06-25 17:42:50 +00:00
const { summary, error } = result;
if (error) {
showError({
title: 'Failed to import',
error: error.message,
message: 'Import failed',
});
return result;
}
2018-06-25 17:42:50 +00:00
let statements = Object.keys(summary)
.map(type => {
const count = summary[type].length;
const name = models.getModelName(type, count);
return count === 0 ? null : `${count} ${name}`;
})
.filter(s => s !== null);
let message;
if (statements.length === 0) {
message = 'Nothing was found to import.';
} else {
message = `You imported ${statements.join(', ')}!`;
}
2018-06-25 17:42:50 +00:00
showModal(AlertModal, { title: 'Import Succeeded', message });
return result;
}
2018-06-25 17:42:50 +00:00
export async function importRaw(
getWorkspaceId: () => Promise<string | null>,
rawContent: string,
2018-06-25 17:42:50 +00:00
): Promise<{
source: string,
error: Error | null,
summary: { [string]: Array<BaseModel> },
2018-06-25 17:42:50 +00:00
}> {
2016-12-01 00:02:35 +00:00
let results;
2016-11-19 03:21:15 +00:00
try {
results = await convert(rawContent);
} catch (err) {
return {
source: 'not found',
error: err,
summary: {},
};
2016-11-19 03:21:15 +00:00
}
2018-06-25 17:42:50 +00:00
const { data } = results;
2016-12-01 00:02:35 +00:00
2016-11-19 03:21:15 +00:00
// Generate all the ids we may need
2018-06-25 17:42:50 +00:00
const generatedIds: { [string]: string | Function } = {};
2016-11-19 03:21:15 +00:00
for (const r of data.resources) {
for (const key of r._id.match(REPLACE_ID_REGEX) || []) {
generatedIds[key] = generateId(MODELS[r._type].prefix);
2016-11-19 03:21:15 +00:00
}
}
// Contains the ID of the workspace to be used with the import
generatedIds[WORKSPACE_ID_KEY] = async () => {
const workspaceId = await getWorkspaceId();
// First try getting the workspace to overwrite
let workspace = await models.workspace.getById(workspaceId || 'n/a');
// If none provided, create a new workspace
if (workspace === null) {
2018-06-25 17:42:50 +00:00
workspace = await models.workspace.create({ name: 'Imported Workspace' });
}
// Update this fn so it doesn't run again
generatedIds[WORKSPACE_ID_KEY] = workspace._id;
return workspace._id;
};
// Contains the ID of the base environment to be used with the import
generatedIds[BASE_ENVIRONMENT_ID_KEY] = async () => {
const parentId = await fnOrString(generatedIds[WORKSPACE_ID_KEY]);
const baseEnvironment = await models.environment.getOrCreateForWorkspaceId(parentId);
// Update this fn so it doesn't run again
generatedIds[BASE_ENVIRONMENT_ID_KEY] = baseEnvironment._id;
return baseEnvironment._id;
};
2016-11-19 03:21:15 +00:00
2016-11-20 07:43:22 +00:00
// Import everything backwards so they get inserted in the correct order
data.resources.reverse();
const importedDocs = {};
for (const model of models.all()) {
importedDocs[model.type] = [];
}
2016-11-19 03:21:15 +00:00
for (const resource of data.resources) {
// Buffer DB changes
// NOTE: Doing it inside here so it's more "scalable"
await db.bufferChanges(100);
2016-11-19 03:21:15 +00:00
// Replace null parentIds with current workspace
if (!resource.parentId && resource._type !== EXPORT_TYPE_WORKSPACE) {
resource.parentId = WORKSPACE_ID_KEY;
2016-11-19 03:21:15 +00:00
}
// Replace ID placeholders (eg. __WORKSPACE_ID__) with generated values
for (const key of Object.keys(generatedIds)) {
const { parentId, _id } = resource;
2019-08-15 21:06:55 +00:00
if (parentId && parentId.includes(key)) {
resource.parentId = parentId.replace(key, await fnOrString(generatedIds[key]));
}
if (_id && _id.includes(key)) {
resource._id = _id.replace(key, await fnOrString(generatedIds[key]));
}
2016-11-19 03:21:15 +00:00
}
const model: Object = MODELS[resource._type];
2016-11-19 03:21:15 +00:00
if (!model) {
console.warn('Unknown doc type for import', resource._type);
continue;
2016-11-19 03:21:15 +00:00
}
2017-07-27 22:59:07 +00:00
const existingDoc = await model.getById(resource._id);
let newDoc: BaseModel;
2017-07-27 22:59:07 +00:00
if (existingDoc) {
newDoc = await db.docUpdate(existingDoc, resource);
2017-07-27 22:59:07 +00:00
} else {
newDoc = await db.docCreate(model.type, resource);
2017-07-27 22:59:07 +00:00
// Mark as not seen if we created a new workspace from sync
if (newDoc.type === models.workspace.type) {
2018-10-17 16:42:33 +00:00
const workspaceMeta = await models.workspaceMeta.getOrCreateByParentId(newDoc._id);
2018-06-25 17:42:50 +00:00
await models.workspaceMeta.update(workspaceMeta, { hasSeen: false });
2017-07-27 22:59:07 +00:00
}
}
2016-11-19 03:21:15 +00:00
2016-11-20 07:43:22 +00:00
importedDocs[newDoc.type].push(newDoc);
2016-11-19 03:21:15 +00:00
}
await db.flushChanges();
2016-11-20 07:43:22 +00:00
2016-12-01 00:02:35 +00:00
return {
2018-10-17 16:42:33 +00:00
source: results.type && typeof results.type.id === 'string' ? results.type.id : 'unknown',
summary: importedDocs,
error: null,
2016-12-01 00:02:35 +00:00
};
2016-11-19 03:21:15 +00:00
}
export async function exportWorkspacesHAR(
parentDoc: BaseModel | null = null,
includePrivateDocs: boolean = false,
): Promise<string> {
const docs: Array<BaseModel> = await getDocWithDescendants(parentDoc, includePrivateDocs);
const requests: Array<BaseModel> = docs.filter(doc => doc.type === models.request.type);
return exportRequestsHAR(requests, includePrivateDocs);
}
export async function exportRequestsHAR(
requests: Array<BaseModel>,
includePrivateDocs: boolean = false,
): Promise<string> {
const workspaces: Array<BaseModel> = [];
const mapRequestIdToWorkspace: Object = {};
const workspaceLookup: Object = {};
for (const request of requests) {
const ancestors: Array<BaseModel> = await db.withAncestors(request, [
models.workspace.type,
models.requestGroup.type,
]);
const workspace = ancestors.find(ancestor => ancestor.type === models.workspace.type);
mapRequestIdToWorkspace[request._id] = workspace;
if (workspace == null || workspaceLookup.hasOwnProperty(workspace._id)) {
continue;
}
workspaceLookup[workspace._id] = true;
workspaces.push(workspace);
}
const mapWorkspaceIdToEnvironmentId: Object = {};
for (const workspace of workspaces) {
const workspaceMeta = await models.workspaceMeta.getByParentId(workspace._id);
let environmentId = workspaceMeta ? workspaceMeta.activeEnvironmentId : null;
const environment = await models.environment.getById(environmentId || 'n/a');
if (!environment || (environment.isPrivate && !includePrivateDocs)) {
environmentId = 'n/a';
}
mapWorkspaceIdToEnvironmentId[workspace._id] = environmentId;
}
requests = requests.sort((a: Object, b: Object) => (a.metaSortKey < b.metaSortKey ? -1 : 1));
const harRequests: Array<Object> = [];
for (const request of requests) {
const workspace = mapRequestIdToWorkspace[request._id];
if (workspace == null) {
// Workspace not found for request, so don't export it.
continue;
}
const environmentId = mapWorkspaceIdToEnvironmentId[workspace._id];
harRequests.push({
requestId: request._id,
environmentId: environmentId,
});
}
const data = await har.exportHar(harRequests);
return JSON.stringify(data, null, '\t');
}
export async function exportWorkspacesData(
parentDoc: BaseModel | null,
includePrivateDocs: boolean,
format: 'json' | 'yaml',
): Promise<string> {
2018-10-17 16:42:33 +00:00
const docs: Array<BaseModel> = await getDocWithDescendants(parentDoc, includePrivateDocs);
const requests: Array<BaseModel> = docs.filter(doc => doc.type === models.request.type);
return exportRequestsData(requests, includePrivateDocs, format);
2016-11-19 03:21:15 +00:00
}
export async function exportRequestsData(
requests: Array<BaseModel>,
includePrivateDocs: boolean,
format: 'json' | 'yaml',
): Promise<string> {
const data = {
_type: 'export',
__export_format: EXPORT_FORMAT,
__export_date: new Date(),
__export_source: `insomnia.desktop.app:v${getAppVersion()}`,
resources: [],
};
const docs: Array<BaseModel> = [];
const workspaces: Array<BaseModel> = [];
const mapTypeAndIdToDoc: Object = {};
for (const req of requests) {
2019-04-18 18:13:12 +00:00
const ancestors: Array<BaseModel> = clone(await db.withAncestors(req));
for (const ancestor of ancestors) {
const key = ancestor.type + '___' + ancestor._id;
if (mapTypeAndIdToDoc.hasOwnProperty(key)) {
continue;
}
mapTypeAndIdToDoc[key] = ancestor;
docs.push(ancestor);
if (ancestor.type === models.workspace.type) {
workspaces.push(ancestor);
}
}
}
for (const workspace of workspaces) {
const descendants: Array<BaseModel> = (await db.withDescendants(workspace)).filter(d => {
// Only interested in these additional model types.
return d.type === models.cookieJar.type || d.type === models.environment.type;
});
docs.push(...descendants);
}
data.resources = docs
.filter(d => {
// Only export these model types.
if (
!(
d.type === models.request.type ||
d.type === models.requestGroup.type ||
d.type === models.workspace.type ||
d.type === models.cookieJar.type ||
d.type === models.environment.type
)
) {
return false;
}
// BaseModel doesn't have isPrivate, so cast it first.
return !(d: Object).isPrivate || includePrivateDocs;
})
.map((d: Object) => {
if (d.type === models.workspace.type) {
d._type = EXPORT_TYPE_WORKSPACE;
} else if (d.type === models.cookieJar.type) {
d._type = EXPORT_TYPE_COOKIE_JAR;
} else if (d.type === models.environment.type) {
d._type = EXPORT_TYPE_ENVIRONMENT;
} else if (d.type === models.requestGroup.type) {
d._type = EXPORT_TYPE_REQUEST_GROUP;
} else if (d.type === models.request.type) {
d._type = EXPORT_TYPE_REQUEST;
}
// Delete the things we don't want to export
delete d.type;
return d;
});
if (format.toLowerCase() === 'yaml') {
return YAML.stringify(data);
} else if (format.toLowerCase() === 'json') {
return JSON.stringify(data);
} else {
throw new Error(`Invalid export format ${format}. Must be "json" or "yaml"`);
}
}
2018-06-25 17:42:50 +00:00
async function getDocWithDescendants(
parentDoc: BaseModel | null = null,
includePrivateDocs: boolean = false,
): Promise<Array<BaseModel>> {
const docs = await db.withDescendants(parentDoc);
2018-06-25 17:42:50 +00:00
return docs.filter(
d =>
// Don't include if private, except if we want to
!(d: any).isPrivate || includePrivateDocs,
2018-06-25 17:42:50 +00:00
);
}