2016-12-01 00:02:35 +00:00
|
|
|
import {convert} from 'insomnia-importers';
|
2016-11-19 07:11:10 +00:00
|
|
|
import * as db from './database';
|
2016-11-19 03:21:15 +00:00
|
|
|
import * as models from '../models';
|
2016-11-19 07:11:10 +00:00
|
|
|
import {getAppVersion} from './constants';
|
|
|
|
import * as misc from './misc';
|
2017-05-03 17:48:23 +00:00
|
|
|
import {showModal} from '../ui/components/modals/index';
|
|
|
|
import AlertModal from '../ui/components/modals/alert-modal';
|
|
|
|
import * as fetch from './fetch';
|
|
|
|
import fs from 'fs';
|
|
|
|
import {trackEvent} from '../analytics/index';
|
2016-11-19 03:21:15 +00:00
|
|
|
|
2016-11-23 03:07:35 +00:00
|
|
|
// Version of the export document schema this module produces/consumes.
const EXPORT_FORMAT = 3;

// The `_type` discriminator strings used on resources in exported documents.
const EXPORT_TYPE_REQUEST = 'request';
const EXPORT_TYPE_REQUEST_GROUP = 'request_group';
const EXPORT_TYPE_WORKSPACE = 'workspace';
const EXPORT_TYPE_COOKIE_JAR = 'cookie_jar';
const EXPORT_TYPE_ENVIRONMENT = 'environment';

// If we come across an ID of this form, we will replace it with a new one
const REPLACE_ID_REGEX = /^__\w+_\d+__$/;

// Maps an export `_type` string to the model module that persists it.
const MODELS = {
  [EXPORT_TYPE_REQUEST]: models.request,
  [EXPORT_TYPE_REQUEST_GROUP]: models.requestGroup,
  [EXPORT_TYPE_WORKSPACE]: models.workspace,
  [EXPORT_TYPE_COOKIE_JAR]: models.cookieJar,
  [EXPORT_TYPE_ENVIRONMENT]: models.environment
};
|
|
|
|
|
2017-05-03 17:48:23 +00:00
|
|
|
/**
 * Import data from a URI into the given workspace and report the outcome
 * to the user via an alert modal.
 *
 * @param {string} workspaceId - _id of the workspace to import into
 * @param {string} uri - an http(s):// URL or a file:// path to the import data
 * @returns {Promise<void>}
 * @throws {Error} if the URI is neither http(s):// nor file://
 */
export async function importUri (workspaceId, uri) {
  let rawText;
  if (uri.match(/^(http|https):\/\//)) {
    const response = await fetch.rawFetch(uri);
    rawText = await response.text();
  } else if (uri.match(/^(file):\/\//)) {
    const path = uri.replace(/^(file):\/\//, '');
    // NOTE(review): synchronous read; presumably acceptable on the desktop
    // app's main process for local imports — confirm before moving to async.
    rawText = fs.readFileSync(path, 'utf8');
  } else {
    throw new Error(`Invalid import URI ${uri}`);
  }

  const workspace = await models.workspace.getById(workspaceId);
  const result = await importRaw(workspace, rawText);
  const {summary, source, error} = result;

  if (error) {
    showModal(AlertModal, {title: 'Import Failed', message: error});
    return;
  }

  // Build a human-readable summary such as "3 Requests, 1 Environment",
  // skipping model types with zero imported documents.
  const statements = Object.keys(summary).map(type => {
    const count = summary[type].length;
    const name = models.getModelName(type, count);
    return count === 0 ? null : `${count} ${name}`;
  }).filter(s => s !== null);

  let message;
  if (statements.length === 0) {
    message = 'Nothing was found to import.';
  } else {
    message = `You imported ${statements.join(', ')}!`;
  }
  showModal(AlertModal, {title: 'Import Succeeded', message});
  trackEvent('Import', 'Success', source);
}
|
|
|
|
|
2016-11-19 07:11:10 +00:00
|
|
|
/**
 * Import raw text content (any format insomnia-importers understands)
 * into the given workspace.
 *
 * @param {Object} workspace - workspace document to import into
 * @param {string} rawContent - raw import data
 * @param {boolean} [generateNewIds=false] - force new _ids for every resource
 * @returns {Promise<{source: string, summary: Object, error: string|null}>}
 *   summary maps model type -> array of imported documents
 */
export async function importRaw (workspace, rawContent, generateNewIds = false) {
  let results;
  try {
    results = convert(rawContent);
  } catch (e) {
    console.warn('Failed to import data', e);
    return {
      source: 'not found',
      error: 'No importers found for file',
      summary: {}
    };
  }

  const {data} = results;

  // Fetch the base environment in case we need it
  const baseEnvironment = await models.environment.getOrCreateForWorkspace(workspace);

  // Generate all the ids we may need
  const generatedIds = {};
  for (const r of data.resources) {
    // Skip unknown resource types here; MODELS[r._type] would be undefined
    // and we'd crash on .prefix. The main loop below warns about them.
    if (!MODELS[r._type]) {
      continue;
    }
    if (generateNewIds || r._id.match(REPLACE_ID_REGEX)) {
      generatedIds[r._id] = misc.generateId(MODELS[r._type].prefix);
    }
  }

  // Always replace these "constants"
  generatedIds['__WORKSPACE_ID__'] = workspace._id;
  generatedIds['__BASE_ENVIRONMENT_ID__'] = baseEnvironment._id;

  // Import everything backwards so they get inserted in the correct order
  data.resources.reverse();

  const importedDocs = {};
  for (const model of models.all()) {
    importedDocs[model.type] = [];
  }

  for (const resource of data.resources) {
    // Buffer DB changes
    // NOTE: Doing it inside here so it's more "scalable"
    db.bufferChanges(100);

    // Replace null parentIds with current workspace
    if (!resource.parentId) {
      resource.parentId = '__WORKSPACE_ID__';
    }

    // Replace _id if we need to
    if (generatedIds[resource._id]) {
      resource._id = generatedIds[resource._id];
    }

    // Replace newly generated IDs if they exist
    if (generatedIds[resource.parentId]) {
      resource.parentId = generatedIds[resource.parentId];
    }

    const model = MODELS[resource._type];
    if (!model) {
      console.warn('Unknown doc type for import', resource._type);
      continue;
    }

    // Update in place if the document already exists, otherwise create it
    const doc = await model.getById(resource._id);
    const newDoc = doc ? await model.update(doc, resource) : await model.create(resource);

    importedDocs[newDoc.type].push(newDoc);
  }

  db.flushChanges();

  return {
    source: results.type.id,
    summary: importedDocs,
    error: null
  };
}
|
|
|
|
|
|
|
|
/**
 * Export a document and all of its descendants as a JSON string in the
 * current export format.
 *
 * @param {Object|null} [parentDoc=null] - root document to export from,
 *   or null to export everything
 * @returns {Promise<string>} tab-indented JSON export document
 */
export async function exportJSON (parentDoc = null) {
  const data = {
    _type: 'export',
    __export_format: EXPORT_FORMAT,
    __export_date: new Date(),
    __export_source: `insomnia.desktop.app:v${getAppVersion()}`,
    resources: {}
  };

  const docs = await db.withDescendants(parentDoc);

  // Model type -> export `_type` for every exportable document type.
  // Types absent from this map are not exported.
  const exportTypes = {
    [models.request.type]: EXPORT_TYPE_REQUEST,
    [models.requestGroup.type]: EXPORT_TYPE_REQUEST_GROUP,
    [models.workspace.type]: EXPORT_TYPE_WORKSPACE,
    [models.cookieJar.type]: EXPORT_TYPE_COOKIE_JAR,
    [models.environment.type]: EXPORT_TYPE_ENVIRONMENT
  };

  data.resources = docs
    .filter(doc => !doc.isPrivate && exportTypes[doc.type] !== undefined)
    .map(doc => {
      // Copy before modifying so we never mutate documents owned by the
      // DB layer (the originals may be cached/shared).
      const d = Object.assign({}, doc, {_type: exportTypes[doc.type]});

      // Delete the things we don't want to export
      delete d.type;
      delete d.isPrivate;
      return d;
    });

  return JSON.stringify(data, null, '\t');
}
|