// insomnia/packages/insomnia-app/app/common/import.js
// @flow
import { convert } from 'insomnia-importers';
import * as db from './database';
import * as har from './har';
import type { BaseModel } from '../models/index';
import * as models from '../models/index';
import { getAppVersion } from './constants';
import { showModal } from '../ui/components/modals/index';
import AlertModal from '../ui/components/modals/alert-modal';
import * as fetch from './fetch';
import fs from 'fs';
import type { Workspace } from '../models/workspace';
import type { Environment } from '../models/environment';
import { fnOrString, generateId } from './misc';
// Version number written into (and expected from) the JSON export format
const EXPORT_FORMAT = 3;

// `_type` tags used to label resources in exported/imported data
const EXPORT_TYPE_REQUEST = 'request';
const EXPORT_TYPE_REQUEST_GROUP = 'request_group';
const EXPORT_TYPE_WORKSPACE = 'workspace';
const EXPORT_TYPE_COOKIE_JAR = 'cookie_jar';
const EXPORT_TYPE_ENVIRONMENT = 'environment';

// If we come across an ID of this form, we will replace it with a new one
const REPLACE_ID_REGEX = /^__\w+_\d+__$/;

// Maps an export `_type` tag to the model module that handles that resource
const MODELS = {
  [EXPORT_TYPE_REQUEST]: models.request,
  [EXPORT_TYPE_REQUEST_GROUP]: models.requestGroup,
  [EXPORT_TYPE_WORKSPACE]: models.workspace,
  [EXPORT_TYPE_COOKIE_JAR]: models.cookieJar,
  [EXPORT_TYPE_ENVIRONMENT]: models.environment,
};
export async function importUri(workspaceId: string | null, uri: string): Promise<void> {
let rawText;
if (uri.match(/^(http|https):\/\//)) {
const response = await fetch.rawFetch(uri);
rawText = await response.text();
} else if (uri.match(/^(file):\/\//)) {
const path = uri.replace(/^(file):\/\//, '');
rawText = fs.readFileSync(path, 'utf8');
} else {
throw new Error(`Invalid import URI ${uri}`);
}
const result = await importRaw(workspaceId, rawText);
2018-06-25 17:42:50 +00:00
const { summary, error } = result;
if (error) {
2018-06-25 17:42:50 +00:00
showModal(AlertModal, { title: 'Import Failed', message: error });
return;
}
2018-06-25 17:42:50 +00:00
let statements = Object.keys(summary)
.map(type => {
const count = summary[type].length;
const name = models.getModelName(type, count);
return count === 0 ? null : `${count} ${name}`;
})
.filter(s => s !== null);
let message;
if (statements.length === 0) {
message = 'Nothing was found to import.';
} else {
message = `You imported ${statements.join(', ')}!`;
}
2018-06-25 17:42:50 +00:00
showModal(AlertModal, { title: 'Import Succeeded', message });
}
// Converts raw import content (any format insomnia-importers understands)
// into database documents under the given workspace. Returns the importer
// source id, a summary of created/updated docs grouped by model type, and an
// error message (null on success). When generateNewIds is true, every
// resource gets a freshly generated _id instead of keeping the one in the
// import data.
export async function importRaw(
  workspaceId: string | null,
  rawContent: string,
  generateNewIds: boolean = false,
): Promise<{
  source: string,
  error: string | null,
  summary: { [string]: Array<BaseModel> },
}> {
  let results;
  try {
    results = await convert(rawContent);
  } catch (e) {
    // No importer recognized the content; report failure instead of throwing
    console.warn('Failed to import data', e);
    return {
      source: 'not found',
      error: 'No importers found for file',
      summary: {},
    };
  }

  const { data } = results;

  // 'n/a' is a sentinel that matches nothing, so a null workspaceId simply
  // yields no existing workspace/environment
  let workspace: Workspace | null = await models.workspace.getById(workspaceId || 'n/a');

  // Fetch the base environment in case we need it
  let baseEnvironment: Environment | null = await models.environment.getOrCreateForWorkspaceId(
    workspaceId || 'n/a',
  );

  // Generate all the ids we may need
  // Values are either a ready string or an async factory resolved lazily below
  const generatedIds: { [string]: string | Function } = {};
  for (const r of data.resources) {
    if (generateNewIds || r._id.match(REPLACE_ID_REGEX)) {
      generatedIds[r._id] = generateId(MODELS[r._type].prefix);
    }
  }

  // Always replace these "constants"
  // Lazy factories so a workspace/environment is only created if actually
  // referenced by some resource
  generatedIds['__WORKSPACE_ID__'] = async () => {
    if (!workspace) {
      workspace = await models.workspace.create({ name: 'Imported Workspace' });
    }
    return workspace._id;
  };

  generatedIds['__BASE_ENVIRONMENT_ID__'] = async () => {
    if (!baseEnvironment) {
      if (!workspace) {
        workspace = await models.workspace.create({
          name: 'Imported Workspace',
        });
      }
      baseEnvironment = await models.environment.getOrCreateForWorkspace(workspace);
    }
    return baseEnvironment._id;
  };

  // Import everything backwards so they get inserted in the correct order
  data.resources.reverse();

  // Pre-seed the summary with an empty list for every known model type
  const importedDocs = {};
  for (const model of models.all()) {
    importedDocs[model.type] = [];
  }

  for (const resource of data.resources) {
    // Buffer DB changes
    // NOTE: Doing it inside here so it's more "scalable"
    await db.bufferChanges(100);

    // Replace null parentIds with current workspace
    if (!resource.parentId && resource._type !== EXPORT_TYPE_WORKSPACE) {
      resource.parentId = '__WORKSPACE_ID__';
    }

    // Replace _id if we need to
    if (generatedIds[resource._id]) {
      resource._id = await fnOrString(generatedIds[resource._id]);
    }

    // Replace newly generated IDs if they exist
    if (generatedIds[resource.parentId]) {
      resource.parentId = await fnOrString(generatedIds[resource.parentId]);
    }

    const model: Object = MODELS[resource._type];
    if (!model) {
      // Unrecognized resource types are skipped, not fatal
      console.warn('Unknown doc type for import', resource._type);
      continue;
    }

    // Update in place when a doc with this _id already exists; otherwise create
    const existingDoc = await model.getById(resource._id);
    let newDoc: BaseModel;
    if (existingDoc) {
      newDoc = await db.docUpdate(existingDoc, resource);
    } else {
      newDoc = await db.docCreate(model.type, resource);

      // Mark as not seen if we created a new workspace from sync
      if (newDoc.type === models.workspace.type) {
        const workspaceMeta = await models.workspaceMeta.getOrCreateByParentId(newDoc._id);
        await models.workspaceMeta.update(workspaceMeta, { hasSeen: false });
      }
    }

    importedDocs[newDoc.type].push(newDoc);
  }

  await db.flushChanges();

  return {
    source: results.type && typeof results.type.id === 'string' ? results.type.id : 'unknown',
    summary: importedDocs,
    error: null,
  };
}
export async function exportHAR(
parentDoc: BaseModel | null = null,
includePrivateDocs: boolean = false,
): Promise<string> {
let workspaces;
if (parentDoc) {
workspaces = [parentDoc];
} else {
workspaces = await models.workspace.all();
}
const workspaceEnvironmentLookup = {};
for (let workspace of workspaces) {
2018-10-17 16:42:33 +00:00
const workspaceMeta = await models.workspaceMeta.getByParentId(workspace._id);
let environmentId = workspaceMeta ? workspaceMeta.activeEnvironmentId : null;
const environment = await models.environment.getById(environmentId || 'n/a');
if (!environment || (environment.isPrivate && !includePrivateDocs)) {
environmentId = 'n/a';
}
workspaceEnvironmentLookup[workspace._id] = environmentId;
}
const requests = [];
for (let workspace of workspaces) {
2018-10-17 16:42:33 +00:00
const docs: Array<BaseModel> = await getDocWithDescendants(workspace, includePrivateDocs);
const workspaceRequests = docs
2018-06-25 17:42:50 +00:00
.filter(d => d.type === models.request.type)
.sort((a: Object, b: Object) => (a.metaSortKey < b.metaSortKey ? -1 : 1))
.map((request: BaseModel) => {
return {
requestId: request._id,
environmentId: workspaceEnvironmentLookup[workspace._id],
};
});
requests.push(...workspaceRequests);
}
const data = await har.exportHar(requests);
return JSON.stringify(data, null, '\t');
}
export async function exportJSON(
parentDoc: BaseModel | null = null,
includePrivateDocs: boolean = false,
): Promise<string> {
2016-11-19 03:21:15 +00:00
const data = {
_type: 'export',
2016-11-23 03:07:35 +00:00
__export_format: EXPORT_FORMAT,
2016-11-19 03:21:15 +00:00
__export_date: new Date(),
__export_source: `insomnia.desktop.app:v${getAppVersion()}`,
resources: [],
2016-11-19 03:21:15 +00:00
};
2018-10-17 16:42:33 +00:00
const docs: Array<BaseModel> = await getDocWithDescendants(parentDoc, includePrivateDocs);
2016-11-19 03:21:15 +00:00
data.resources = docs
2018-06-25 17:42:50 +00:00
.filter(
d =>
// Only export these model types
d.type === models.request.type ||
d.type === models.requestGroup.type ||
d.type === models.workspace.type ||
d.type === models.cookieJar.type ||
d.type === models.environment.type,
2018-06-25 17:42:50 +00:00
)
.map((d: Object) => {
if (d.type === models.workspace.type) {
d._type = EXPORT_TYPE_WORKSPACE;
} else if (d.type === models.cookieJar.type) {
d._type = EXPORT_TYPE_COOKIE_JAR;
} else if (d.type === models.environment.type) {
d._type = EXPORT_TYPE_ENVIRONMENT;
} else if (d.type === models.requestGroup.type) {
d._type = EXPORT_TYPE_REQUEST_GROUP;
} else if (d.type === models.request.type) {
d._type = EXPORT_TYPE_REQUEST;
}
// Delete the things we don't want to export
delete d.type;
return d;
});
2016-11-19 03:21:15 +00:00
2016-11-23 03:07:35 +00:00
return JSON.stringify(data, null, '\t');
2016-11-19 03:21:15 +00:00
}
async function getDocWithDescendants(
parentDoc: BaseModel | null = null,
includePrivateDocs: boolean = false,
): Promise<Array<BaseModel>> {
const docs = await db.withDescendants(parentDoc);
2018-06-25 17:42:50 +00:00
return docs.filter(
d =>
// Don't include if private, except if we want to
!(d: any).isPrivate || includePrivateDocs,
2018-06-25 17:42:50 +00:00
);
}