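// Jest tests for the sync module: push/pull behaviour, conflict resolution,
// and how Resources track local database changes. The network layer is
// replaced with mocks (see _setupSessionMocks below).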
import * as sync from '../index';
import * as models from '../../models';
import * as db from '../../common/database';
import * as syncStorage from '../storage';
import * as network from '../network';
import { globalBeforeEach } from '../../__jest__/before-each';
import * as session from '../../account/session';
import * as crypt from '../../account/crypt';

describe('Test push/pull behaviour', () => {
  beforeEach(async () => {
    await globalBeforeEach();

    // Reset some things
    sync._testReset();
    await _setSessionData();
    await _setupSessionMocks();

    // Init sync and storage
    const config = { inMemoryOnly: true, autoload: false, filename: null };
    await syncStorage.initDB(config, true);

    // Add some data
    await models.workspace.create({ _id: 'wrk_1', name: 'Workspace 1' });
    await models.workspace.create({ _id: 'wrk_2', name: 'Workspace 2' });
    await models.request.create({
      _id: 'req_1',
      name: 'Request 1',
      parentId: 'wrk_1',
    });
    await models.request.create({
      _id: 'req_2',
      name: 'Request 2',
      parentId: 'wrk_2',
    });

    // Create resources, resource groups, and configs
    const workspaces = await models.workspace.all();
    const requests = await models.request.all();
    for (const d of [...workspaces, ...requests]) {
      await sync.getOrCreateResourceForDoc(d);
    }
  });

  it('Pushes sync mode with and without resource group id', async () => {
    const request = await models.request.getById('req_1');
    const request2 = await models.request.getById('req_2');
    const resourceRequest = await syncStorage.getResourceByDocId(request._id);
    const resourceRequest2 = await syncStorage.getResourceByDocId(request2._id);

    // Set up sync modes
    await sync.createOrUpdateConfig(resourceRequest.resourceGroupId, {
      syncMode: syncStorage.SYNC_MODE_ON,
    });
    await sync.createOrUpdateConfig(resourceRequest2.resourceGroupId, {
      syncMode: syncStorage.SYNC_MODE_UNSET,
    });

    await sync.push(); // Push only active configs
    await sync.push(resourceRequest.resourceGroupId); // Force push rg_1
    await sync.push(resourceRequest2.resourceGroupId); // Force push rg_2

    expect(network.syncPush.mock.calls.length).toBe(3);
    expect(network.syncPush.mock.calls[0][0].length).toBe(2);
    expect(network.syncPush.mock.calls[0][0][0].id).toBe('wrk_1');
    expect(network.syncPush.mock.calls[0][0][1].id).toBe('req_1');
    expect(network.syncPush.mock.calls[1][0].length).toBe(2);
    expect(network.syncPush.mock.calls[1][0][0].id).toBe('wrk_1');
    expect(network.syncPush.mock.calls[1][0][1].id).toBe('req_1');
    expect(network.syncPush.mock.calls[2][0].length).toBe(2);
    expect(network.syncPush.mock.calls[2][0][0].id).toBe('wrk_2');
    expect(network.syncPush.mock.calls[2][0][1].id).toBe('req_2');
  });

  it('Updates dirty flag for push response', async () => {
    const request = await models.request.getById('req_1');
    const resourceRequest = await syncStorage.getResourceByDocId(request._id);
    await sync.createOrUpdateConfig(resourceRequest.resourceGroupId, {
      syncMode: syncStorage.SYNC_MODE_ON,
    });

    network.syncPush.mockReturnValueOnce({
      updated: [],
      created: [{ id: request._id, version: 'new-version' }],
      removed: [],
      conflicts: [],
    });

    const resourceBefore = await syncStorage.getResourceByDocId(request._id);
    await sync.push(resourceRequest.resourceGroupId);
    const resourceAfter = await syncStorage.getResourceByDocId(request._id);

    expect(network.syncPush.mock.calls.length).toBe(1);
    expect(network.syncPush.mock.calls[0][0].length).toBe(2);
    expect(network.syncPush.mock.calls[0][0][0].id).toBe('wrk_1');
    expect(network.syncPush.mock.calls[0][0][1].id).toBe('req_1');
    expect(resourceBefore.dirty).toBe(true);
    expect(resourceAfter.dirty).toBe(false);
  });

  it('Updates resources for pull response', async () => {
    const request = await models.request.getById('req_1');
    const request2 = await models.request.getById('req_2');
    const requestNew = Object.assign({}, request, {
      _id: 'req_new',
      name: 'New Request',
    });
    const resourceBefore = await syncStorage.getResourceByDocId(request._id);
    const resource2Before = await syncStorage.getResourceByDocId(requestNew._id);
    await sync.createOrUpdateConfig(resourceBefore.resourceGroupId, {
      syncMode: syncStorage.SYNC_MODE_ON,
    });
    const updatedRequest = Object.assign({}, request, {
      name: 'Request Updated',
    });
    const updatedResource = Object.assign({}, resourceBefore, {
      version: 'ver1',
      encContent: await sync.encryptDoc(resourceBefore.resourceGroupId, updatedRequest),
    });
    const createdResourceNew = Object.assign({}, resourceBefore, {
      id: requestNew._id,
      resourceGroupId: 'rg_1',
      encContent: await sync.encryptDoc(resourceBefore.resourceGroupId, requestNew),
    });

    network.syncPull.mockReturnValueOnce({
      updatedResources: [updatedResource],
      createdResources: [createdResourceNew],
      idsToPush: [],
      idsToRemove: ['req_2'],
    });

    // Pull and get docs/resources
    await sync.pull(resourceBefore.resourceGroupId);
    const requestAfter = await models.request.getById(request._id);
    const request2After = await models.request.getById(request2._id);
    const requestNewAfter = await models.request.getById('req_new');
    const resourceAfter = await syncStorage.getResourceByDocId(
      request._id,
      resourceBefore.resourceGroupId,
    );
    const resource2After = await syncStorage.getResourceByDocId(request2._id);
    const resourceNewAfter = await syncStorage.getResourceByDocId(requestNewAfter._id);

    // Assert
    expect(resourceBefore.version).toBe('__NO_VERSION__');
    expect(resourceAfter.version).toBe(updatedResource.version);
    expect(resourceBefore.dirty).toBe(true);
    expect(resource2Before).toBe(null);
    expect(resourceAfter.dirty).toBe(false);
    expect(resource2After.removed).toBe(true);
    expect(requestAfter.name).toBe('Request Updated');
    expect(request2After).toBe(null);
    expect(resourceNewAfter).not.toBe(null);
  });

  it('Conflict: local version wins on modified before', async () => {
    const requestClient = await models.request.getById('req_1');
    const requestServer = Object.assign({}, requestClient, {
      name: 'Server Request',
    });
    const resourceRequest = await syncStorage.getResourceByDocId(requestClient._id);
    const resourceConflict = Object.assign({}, resourceRequest, {
      version: 'ver-2',
      encContent: await sync.encryptDoc(resourceRequest.resourceGroupId, requestServer),
      lastEdited: resourceRequest.lastEdited - 1000, // Edited before the local version
    });

    network.syncPush.mockReturnValueOnce({
      updated: [],
      created: [],
      removed: [],
      conflicts: [resourceConflict],
    });

    await sync.push(resourceRequest.resourceGroupId);
    const resourceAfter = await syncStorage.getResourceByDocId(
      requestClient._id,
      resourceRequest.resourceGroupId,
    );
    const requestAfter = await models.request.getById(requestClient._id);

    // Assert
    expect(network.syncPush.mock.calls.length).toBe(1);
    expect(network.syncPush.mock.calls[0][0].length).toBe(2);
    // Even when local wins, the local resource gets the remote resource's version
    expect(resourceAfter.version).toBe(resourceConflict.version);
    // Local resource gets marked as dirty so it's pushed right away
    expect(resourceAfter.dirty).toBe(true);
    // Local db should not be changed since the local version won
    expect(requestAfter).toEqual(requestClient);
  });

  it('Conflict: local version wins on modified tie', async () => {
    const requestClient = await models.request.getById('req_1');
    const requestServer = Object.assign({}, requestClient, {
      name: 'Server Request',
    });
    const resourceRequest = await syncStorage.getResourceByDocId(requestClient._id);
    const resourceConflict = Object.assign({}, resourceRequest, {
      version: 'ver-2',
      encContent: await sync.encryptDoc(resourceRequest.resourceGroupId, requestServer),
      lastEdited: resourceRequest.lastEdited, // Same edited time
    });

    network.syncPush.mockReturnValueOnce({
      updated: [],
      created: [],
      removed: [],
      conflicts: [resourceConflict],
    });

    await sync.push(resourceRequest.resourceGroupId);
    const resourceAfter = await syncStorage.getResourceByDocId(
      requestClient._id,
      resourceRequest.resourceGroupId,
    );
    const requestAfter = await models.request.getById(requestClient._id);

    // Assert
    expect(network.syncPush.mock.calls.length).toBe(1);
    expect(network.syncPush.mock.calls[0][0].length).toBe(2);
    // Even when local wins, the local resource gets the remote resource's version
    expect(resourceAfter.version).toBe(resourceConflict.version);
    // Local resource gets marked as dirty so it's pushed right away
    expect(resourceAfter.dirty).toBe(true);
    // Local db should not be changed since the local version won
    expect(requestAfter).toEqual(requestClient);
  });

  it('Conflict: server version wins if modified after', async () => {
    const requestClient = await models.request.getById('req_1');
    const requestServer = Object.assign({}, requestClient, {
      name: 'Server Request',
    });
    const resourceRequest = await syncStorage.getResourceByDocId(requestClient._id);
    const resourceConflict = Object.assign({}, resourceRequest, {
      version: 'ver-2',
      encContent: await sync.encryptDoc(resourceRequest.resourceGroupId, requestServer),
      lastEdited: resourceRequest.lastEdited + 1000, // Edited after the local version
    });

    network.syncPush.mockReturnValueOnce({
      updated: [],
      created: [],
      removed: [],
      conflicts: [resourceConflict],
    });

    await sync.push(resourceRequest.resourceGroupId);
    const resourceAfter = await syncStorage.getResourceByDocId(
      requestClient._id,
      resourceRequest.resourceGroupId,
    );
    const requestAfter = await models.request.getById(requestClient._id);

    // Assert
    expect(network.syncPush.mock.calls.length).toBe(1);
    expect(network.syncPush.mock.calls[0][0].length).toBe(2);
    expect(resourceAfter.lastEdited).toBeGreaterThan(resourceRequest.lastEdited);
    expect(resourceAfter.version).toBe(resourceConflict.version);
    // Local resource is not dirty because the server version was accepted
    expect(resourceAfter.dirty).toBe(false);
    expect(requestAfter.name).toBe('Server Request');
    expect(requestAfter.modified).toBe(requestServer.modified);
  });
});

describe('Integration tests for creating Resources and pushing', () => {
  beforeEach(async () => {
    await globalBeforeEach();

    // Reset some things
    await _setSessionData();
    sync._testReset();

    // Mock some things
    await _setupSessionMocks();
    jest.useFakeTimers();

    // Init storage
    const config = { inMemoryOnly: true, autoload: false, filename: null };
    await syncStorage.initDB(config, true);

    // Add some data
    await models.workspace.create({
      _id: 'wrk_empty',
      name: 'Workspace Empty',
    });
    await models.workspace.create({ _id: 'wrk_1', name: 'Workspace 1' });
    await models.request.create({
      _id: 'req_1',
      name: 'Request 1',
      parentId: 'wrk_1',
    });
    await models.request.create({
      _id: 'req_2',
      name: 'Request 2',
      parentId: 'wrk_1',
    });
    await models.request.create({
      _id: 'req_3',
      name: 'Request 3',
      parentId: 'wrk_1',
    });
    await models.environment.create({
      _id: 'env_2',
      name: 'Env Prv',
      parentId: 'wrk_1',
      isPrivate: true,
    });

    // Flush changes just to be sure they won't affect our tests
    await db.flushChanges();
    await sync.writePendingChanges();

    // Assert that all our new models were created
    expect((await models.workspace.all()).length).toBe(2);
    expect((await models.request.all()).length).toBe(3);
    expect((await models.environment.all()).length).toBe(1);
    expect((await models.cookieJar.all()).length).toBe(0);

    // Assert that initializing sync will create the initial resources
    expect((await syncStorage.allConfigs()).length).toBe(0);
    expect((await syncStorage.allResources()).length).toBe(0);
    const promise = sync.init();
    jest.runOnlyPendingTimers();
    await promise;
    expect((await syncStorage.allConfigs()).length).toBe(2);
    expect((await syncStorage.allResources()).length).toBe(5);

    // Mark all configs as auto sync
    const configs = await syncStorage.allConfigs();
    for (const config of configs) {
      await syncStorage.updateConfig(config, {
        syncMode: syncStorage.SYNC_MODE_ON,
      });
    }

    // Do initial push
    await sync.push();

    // Reset mocks once again before tests
    await _setupSessionMocks();
  });

  it('Resources created on DB change', async () => {
    // Create a new request while DB changes are buffered
    await db.bufferChanges();
    await models.request.create({
      _id: 'req_t',
      url: 'https://google.com',
      parentId: 'wrk_1',
    });

    await db.flushChanges();
    await sync.writePendingChanges();
    await sync.push();

    // Fetch the resource that was created for the new request
    const resource = await syncStorage.getResourceByDocId('req_t');

    // Assert
    expect((await syncStorage.allConfigs()).length).toBe(2);
    expect((await syncStorage.allResources()).length).toBe(6);
    expect(_decryptResource(resource).url).toBe('https://google.com');
    expect(resource.removed).toBe(false);

    expect(network.syncPush.mock.calls.length).toBe(1);
    expect(network.syncPush.mock.calls[0][0].length).toBe(6);

    expect(network.syncPull.mock.calls).toEqual([]);
  });

  it('Resources revived on DB change', async () => {
    // Create a new request while DB changes are buffered
    await db.bufferChanges();
    const request = await models.request.create({
      _id: 'req_t',
      name: 'Original Request',
      parentId: 'wrk_1',
    });
    await db.flushChanges();
    await sync.writePendingChanges();
    await sync.push();

    // Mark resource as removed
    const originalResource = await syncStorage.getResourceByDocId('req_t');
    const updatedResource = await syncStorage.updateResource(originalResource, {
      removed: true,
    });

    // Update it and push it again
    await db.bufferChanges();
    await models.request.update(request, { name: 'New Name' });
    await db.flushChanges();
    await sync.writePendingChanges();
    await sync.push();
    const finalResource = await syncStorage.getResourceByDocId('req_t');

    // Assert
    expect(originalResource.removed).toBe(false);
    expect(updatedResource.removed).toBe(true);
    expect(finalResource.removed).toBe(false);
  });

  it('Resources update on DB change', async () => {
    // Fetch a request and its resource, then update the request
    const request = await models.request.getById('req_1');
    const resource = await syncStorage.getResourceByDocId(request._id);
    await db.bufferChanges();
    const updatedRequest = await models.request.update(request, {
      name: 'New Name',
    });

    // Drain and fetch new resource
    await db.flushChanges();
    await sync.writePendingChanges();
    await sync.push();
    const updatedResource = await syncStorage.getResourceByDocId(request._id);

    // Assert
    expect(request.name).toBe('Request 1');
    expect(_decryptResource(resource).name).toBe('Request 1');
    expect(updatedRequest.name).toBe('New Name');
    expect(_decryptResource(updatedResource).name).toBe('New Name');
    expect(resource.removed).toBe(false);

    expect(network.syncPush.mock.calls.length).toBe(1);
    expect(network.syncPush.mock.calls[0][0].length).toBe(5);

    expect(network.syncPull.mock.calls).toEqual([]);
  });

  it('Resources removed on DB change', async () => {
    // Fetch a request and its resource, then remove the request
    const request = await models.request.getById('req_1');
    const resource = await syncStorage.getResourceByDocId(request._id);
    await db.bufferChanges();
    await models.request.remove(request);

    // Drain and fetch new resource
    await db.flushChanges();
    await sync.writePendingChanges();
    await sync.push();
    const updatedResource = await syncStorage.getResourceByDocId(request._id);

    // Assert
    expect(resource.removed).toBe(false);
    expect(updatedResource.removed).toBe(true);

    expect(network.syncPush.mock.calls.length).toBe(1);
    expect(network.syncPush.mock.calls[0][0].length).toBe(5);

    expect(network.syncPull.mock.calls).toEqual([]);
  });
});

// ~~~~~~~ //
// Helpers //
// ~~~~~~~ //

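// Decrypt a Resource's encContent with the hard-coded test key and return the
// parsed document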
function _decryptResource(resource) {
  const message = JSON.parse(resource.encContent);
  const fakeKey = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa';
  const docJSON = crypt.decryptAES(fakeKey, message);
  return JSON.parse(docJSON);
}

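// Populate the session with a fixed account, symmetric key, public key, and an
// encrypted private key generated on the fly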
async function _setSessionData() {
  const symmetricKey = {
    alg: 'A256GCM',
    ext: true,
    k: '3-QU2OcQcpSyFIoL8idgclbImP3M8Y2d0oVAca3Vl4g',
    key_ops: ['encrypt', 'decrypt'],
    kty: 'oct',
  };

  const publicKey = {
    alg: 'RSA-OAEP-256',
    e: 'AQAB',
    ext: true,
    key_ops: ['encrypt'],
    kty: 'RSA',
    n: 'aaaa',
  };

  const { privateKey } = await crypt.generateKeyPairJWK();
  const encPrivateKey = {
    ad: '',
    d: Buffer.from(JSON.stringify(privateKey)).toString('hex'),
    iv: '968f1d810efdaec58f9e313e',
    t: '0e87a2e57a198ca79cb99585fe9c244a',
  };

  // Set the fake session data
  session.setSessionData(
    'ses_123',
    'acct_123',
    'Tammy',
    'Tester',
    'gschier1990@gmail.com',
    symmetricKey,
    publicKey,
    encPrivateKey,
  );
}

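// Replace the network layer with Jest mocks: resource groups are kept in
// memory, and syncPull/syncPush default to empty responses that individual
// tests override with mockReturnValueOnce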
async function _setupSessionMocks() {
  const resourceGroups = {};

  network.syncCreateResourceGroup = jest.fn((parentId, name, _) => {
    const id = `rg_${Object.keys(resourceGroups).length + 1}`;

    // Fetch the account's public key and, for convenience, use a symmetric
    // key derived from the resource group's ID
    const publicKey = session.getPublicKey();
    const symmetricKeyStr = JSON.stringify({ k: id });
    const encSymmetricKey = crypt.encryptRSAWithJWK(publicKey, symmetricKeyStr);

    // Store the resource group and return it
    resourceGroups[id] = {
      id,
      parentResourceId: parentId,
      name,
      encSymmetricKey,
    };
    return resourceGroups[id];
  });

  network.syncGetResourceGroup = jest.fn(id => {
    if (resourceGroups[id]) {
      return resourceGroups[id];
    }

    const err = new Error(`Not Found for ${id}`);
    err.statusCode = 404;
    throw err;
  });

  network.syncPull = jest.fn(body => ({
    updatedResources: [],
    createdResources: [],
    idsToPush: [],
    idsToRemove: [],
  }));

  network.syncPush = jest.fn(body => ({
    conflicts: [],
    updated: [],
    created: [],
    removed: [],
  }));
}