import * as db from '../database';
import * as util from './util';
import * as crypt from './crypt';
import * as session from './session';
import * as store from './storage';
import Logger from './logger';

export const FULL_SYNC_INTERVAL = 60E3;
export const QUEUE_DEBOUNCE_TIME = 1E3;
export const PUSH_DEBOUNCE_TIME = 10E3;
export const START_PULL_DELAY = 2E3;
export const START_PUSH_DELAY = 1E3;
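
// Only document types in this whitelist take part in sync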
const WHITE_LIST = {
  [db.request.type]: true,
  [db.requestGroup.type]: true,
  [db.workspace.type]: true,
  [db.environment.type]: true,
  [db.cookieJar.type]: true
};
export const logger = new Logger();
// TODO: Move this stuff somewhere else
const NO_VERSION = '__NO_VERSION__';
const resourceGroupCache = {};
/**
 * Trigger a full sync cycle immediately. Useful if you don't want to wait
 * for the next scheduled sync.
 */
export async function triggerSync () {
  await initSync();
  await push();
  await pull();
}
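// Hook database change events into the sync queue and start the sync
// timers. Only initializes once; subsequent calls are no-ops.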
let isInitialized = false;
export async function initSync () {
  const settings = await db.settings.getOrCreate();
  if (!settings.optSyncBeta) {
    logger.debug('Not enabled');
    return;
  }

  if (isInitialized) {
    logger.debug('Already initialized');
    return;
  }

  db.onChange(changes => {
    for (const [event, doc, fromSync] of changes) {
      if (!WHITE_LIST[doc.type]) {
        continue;
      }

      if (fromSync) {
        // Change was triggered by sync itself, so do nothing.
        continue;
      }

      // Make sure the queueing happens asynchronously
      process.nextTick(() => _queueChange(event, doc));
    }
  });

  setTimeout(pull, START_PULL_DELAY);
  setTimeout(push, START_PUSH_DELAY);
  setInterval(pull, FULL_SYNC_INTERVAL);
  isInitialized = true;
  logger.debug('Initialized');
}
/**
 * Non-blocking function to perform the initial sync for an account. This
 * pulls all remote resources (if they exist) before initializing sync.
 */
export function doInitialSync () {
  process.nextTick(async () => {
    // First, pull down all remote resources without creating any new ones.
    // This ensures the first sync won't create resources locally when they
    // already exist on the server.
    await pull(null, false);

    // Make sure sync is on (start the timers)
    await initSync();
  });
}
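
// Typical usage (hypothetical): call doInitialSync() once after login, and
// triggerSync() behind a manual "sync now" action; both are fire-and-forget.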
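/**
 * Push all dirty local resources (optionally scoped to one ResourceGroup)
 * to the server, then apply the server's verdict: bump versions for
 * updated/created/removed resources and resolve any conflicts.
 */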
export async function push (resourceGroupId = null) {
  if (!session.isLoggedIn()) {
    logger.warn('Not logged in');
    return;
  }

  let dirtyResources = [];
  if (resourceGroupId) {
    dirtyResources = await store.findActiveDirtyResourcesForResourceGroup(resourceGroupId);
  } else {
    dirtyResources = await store.findActiveDirtyResources();
  }

  if (!dirtyResources.length) {
    logger.debug('No changes to push');
    return;
  }

  let responseBody;
  try {
    responseBody = await util.fetchPost('/sync/push', dirtyResources);
  } catch (e) {
    logger.error('Failed to push changes', e);
    return;
  }

  // Update local resource versions with the ones the server returned
  const {updated} = responseBody;
  for (const {id, version} of updated) {
    const resource = await store.getResourceById(id);
    await store.updateResource(resource, {version, dirty: false});
    logger.debug(`Updated ${id}`);
  }

  // Do the same for the resources the server created
  const {created} = responseBody;
  for (const {id, version} of created) {
    const resource = await store.getResourceById(id);
    await store.updateResource(resource, {version, dirty: false});
    logger.debug(`Created ${id}`);
  }

  // ... and for the resources the server removed
  const {removed} = responseBody;
  for (const {id, version} of removed) {
    const resource = await store.getResourceById(id);
    await store.updateResource(resource, {version, dirty: false});
    logger.debug(`Removed ${id}`);
  }

  // Resolve conflicts
  const {conflicts} = responseBody;
  for (const serverResource of conflicts) {
    const localResource = await store.getResourceById(serverResource.id);

    // On conflict, choose the most recently edited one
    const serverIsNewer = serverResource.lastEdited > localResource.lastEdited;
    const winner = serverIsNewer ? serverResource : localResource;

    // Decrypt the doc from the resource rather than fetching the local doc
    // from the app database, because it might have been deleted there.
    logger.debug(`Resolved conflict for ${serverResource.id} (${serverIsNewer ? 'Server' : 'Local'})`, winner);

    // Update the local resource
    // NOTE: use localResource as the base to make sure we keep _id
    await store.updateResource(localResource, winner, {
      version: serverResource.version, // Act as the server resource no matter what
      dirty: !serverIsNewer // It's dirty if we chose the local doc
    });

    // If the server won, update ourselves. If we won, we already have the
    // latest version, so do nothing.
    if (serverIsNewer) {
      const doc = await _decryptDoc(winner.resourceGroupId, winner.encContent);
      if (winner.removed) {
        await db.remove(doc, true);
      } else {
        await db.update(doc, true);
      }
    }
  }
}
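/**
 * Pull remote changes and reconcile them with local state. When
 * createMissingResources is false, only resources that already exist
 * locally are diffed (used by doInitialSync).
 */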
export async function pull (resourceGroupId = null, createMissingResources = true) {
  if (!session.isLoggedIn()) {
    logger.warn('Not logged in');
    return;
  }

  let allResources;
  if (createMissingResources) {
    allResources = await _getOrCreateAllActiveResources(resourceGroupId);
  } else {
    allResources = await store.allActiveResources(resourceGroupId);
  }

  let blacklistedConfigs;
  if (resourceGroupId) {
    // When doing a partial sync, blacklist all configs except the one we're
    // trying to sync.
    const allConfigs = await store.allConfigs();
    blacklistedConfigs = allConfigs.filter(c => c.resourceGroupId !== resourceGroupId);
  } else {
    // When doing a full sync, blacklist the inactive configs
    blacklistedConfigs = await store.findInactiveConfigs(resourceGroupId);
  }

  const resources = allResources.map(r => ({
    id: r.id,
    resourceGroupId: r.resourceGroupId,
    version: r.version,
    removed: r.removed
  }));

  const blacklistedResourceGroupIds = blacklistedConfigs.map(c => c.resourceGroupId);
  const body = {resources, blacklist: blacklistedResourceGroupIds};

  logger.debug(`Diffing ${resources.length} resources`);

  let responseBody;
  try {
    responseBody = await util.fetchPost('/sync/pull', body);
  } catch (e) {
    logger.error('Failed to sync changes', e, body);
    return;
  }

  const {
    updatedResources,
    createdResources,
    idsToPush,
    idsToRemove,
  } = responseBody;

  // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //
  // Insert all the created docs to the DB //
  // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //

  await Promise.all(createdResources.map(async serverResource => {
    let doc;

    try {
      const {resourceGroupId, encContent} = serverResource;
      doc = await _decryptDoc(resourceGroupId, encContent);
    } catch (e) {
      logger.warn('Failed to decode created resource', e, serverResource);
      return;
    }

    // Insert the local Resource
    try {
      await store.insertResource(serverResource, {dirty: false});
    } catch (e) {
      // This probably means we already have it. That should never happen,
      // but might due to a rare race condition.
      logger.error('Failed to insert resource', e);
      return;
    }

    // NOTE: If the Resource insert above succeeded, we could safely insert
    // the document too. However, we use an upsert instead because it's very
    // possible that the client already has the document locally. That can
    // happen, for example, if the user logs out and back in again.
    await db.upsert(doc, true);
  }));

  if (createdResources.length) {
    logger.debug(`Created ${createdResources.length} resources`, createdResources);
  }

  // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //
  // Save all the updated docs to the DB //
  // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //

  await Promise.all(updatedResources.map(async serverResource => {
    try {
      const {resourceGroupId, encContent} = serverResource;
      const doc = await _decryptDoc(resourceGroupId, encContent);

      // Update the app database
      await db.update(doc, true);

      // Update the local resource
      const resource = await store.getResourceById(serverResource.id);
      await store.updateResource(resource, serverResource, {dirty: false});
    } catch (e) {
      logger.warn('Failed to decode updated resource', e, serverResource);
    }
  }));

  if (updatedResources.length) {
    logger.debug(`Updated ${updatedResources.length} resources`, updatedResources);
  }

  // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //
  // Remove all the docs that need removing //
  // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //

  for (const id of idsToRemove) {
    const resource = await store.getResourceById(id);
    if (!resource) {
      throw new Error(`Could not find Resource to remove for ${id}`);
    }

    const doc = await _decryptDoc(resource.resourceGroupId, resource.encContent);
    if (!doc) {
      throw new Error(`Could not find doc to remove ${id}`);
    }

    // Mark the resource as deleted
    await store.updateResource(resource, {dirty: false, removed: true});

    // Remove the doc from the app database
    await db.remove(doc, true);
  }

  // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //
  // Push all the docs that need pushing //
  // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ //

  for (const id of idsToPush) {
    const resource = await store.getResourceById(id);
    if (!resource) {
      throw new Error(`Could not find Resource to push for id ${id}`);
    }

    const doc = await _decryptDoc(resource.resourceGroupId, resource.encContent);
    if (!doc) {
      throw new Error(`Could not find doc to push ${id}`);
    }

    _queueChange(db.CHANGE_UPDATE, doc);
  }

  return updatedResources.length + createdResources.length;
}
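/**
 * Get the sync Config for a ResourceGroup, creating an empty one if none
 * exists yet.
 */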
export async function getOrCreateConfig (resourceGroupId) {
  const config = await store.getConfig(resourceGroupId);

  if (!config) {
    return await store.insertConfig({resourceGroupId});
  } else {
    return config;
  }
}
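/**
 * Set the sync mode for a ResourceGroup, creating its Config if necessary.
 */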
export async function createOrUpdateConfig (resourceGroupId, syncMode) {
  const config = await store.getConfig(resourceGroupId);
  const patch = {resourceGroupId, syncMode};

  if (config) {
    return await store.updateConfig(config, patch);
  } else {
    return await store.insertConfig(patch);
  }
}
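/** Remove all local sync Resources and Configs. App documents are untouched. */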
export async function resetLocalData () {
  for (const r of await store.allResources()) {
    await store.removeResource(r);
  }

  for (const c of await store.allConfigs()) {
    await store.removeConfig(c);
  }
}
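/** Ask the server to reset all remote sync data for this account. */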
export async function resetRemoteData () {
  await util.fetchPost('/auth/reset');
}
// ~~~~~~~ //
// HELPERS //
// ~~~~~~~ //
let _queuedChanges = {};
let _queuedChangesTimeout = null;
let _pushChangesTimeout = null;
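/**
 * Queue a local document change: re-encrypt the doc into its Resource and
 * mark it dirty, then schedule a push. Both steps are debounced.
 */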
async function _queueChange (event, doc) {
  if (!session.isLoggedIn()) {
    logger.warn('Not logged in');
    return;
  }

  // How this works:
  // First, debounce updates to Resources because they are heavy (encryption).
  // Second, debounce pushes to the server because they are slow (network).
  // _queuedChanges is keyed by doc._id + event so that repeated changes to
  // the same doc don't trigger more than one update.

  // NOTE: Don't use doc.modified because that doesn't work for removal
  _queuedChanges[doc._id + event] = [event, doc, Date.now()];

  clearTimeout(_queuedChangesTimeout);
  _queuedChangesTimeout = setTimeout(async () => {
    const queuedChangesCopy = Object.assign({}, _queuedChanges);
    _queuedChanges = {};

    for (const k of Object.keys(queuedChangesCopy)) {
      const [event, doc, ts] = queuedChangesCopy[k];

      // Update the resource content and mark it dirty
      const resource = await getOrCreateResourceForDoc(doc);
      await store.updateResource(resource, {
        lastEdited: ts,
        lastEditedBy: session.getAccountId(),
        encContent: await _encryptDoc(resource.resourceGroupId, doc),
        removed: event === db.CHANGE_REMOVE,
        dirty: true
      });

      logger.debug(`Queue ${event} ${doc._id}`);

      // Debounce pushing of dirty resources
      clearTimeout(_pushChangesTimeout);
      _pushChangesTimeout = setTimeout(() => push(), PUSH_DEBOUNCE_TIME);
    }
  }, QUEUE_DEBOUNCE_TIME);
}
/**
 * Fetch a ResourceGroup. If it has been fetched before, it is served from
 * the in-memory cache.
 *
 * @param resourceGroupId
 * @returns {*}
 */
async function _fetchResourceGroup (resourceGroupId) {
  let resourceGroup = resourceGroupCache[resourceGroupId];

  if (!resourceGroup) {
    // TODO: Handle a 404 here
    try {
      resourceGroup = resourceGroupCache[resourceGroupId] = await util.fetchGet(
        `/api/resource_groups/${resourceGroupId}`
      );
    } catch (e) {
      logger.error(`Failed to get ResourceGroup ${resourceGroupId}: ${e}`);
      throw e;
    }
  }

  return resourceGroup;
}
/**
 * Get a ResourceGroup's symmetric encryption key.
 *
 * @param resourceGroupId
 * @private
 */
async function _getResourceGroupSymmetricKey (resourceGroupId) {
  const resourceGroup = await _fetchResourceGroup(resourceGroupId);
  const accountPrivateKey = await session.getPrivateKey();

  const symmetricKeyStr = crypt.decryptRSAWithJWK(
    accountPrivateKey,
    resourceGroup.encSymmetricKey
  );

  return JSON.parse(symmetricKeyStr);
}
async function _encryptDoc (resourceGroupId, doc) {
  try {
    const symmetricKey = await _getResourceGroupSymmetricKey(resourceGroupId);
    const docStr = JSON.stringify(doc);
    const message = crypt.encryptAES(symmetricKey, docStr);
    return JSON.stringify(message);
  } catch (e) {
    logger.error(`Failed to encrypt for ${resourceGroupId}: ${e}`);
    throw e;
  }
}
async function _decryptDoc (resourceGroupId, messageJSON) {
  try {
    const symmetricKey = await _getResourceGroupSymmetricKey(resourceGroupId);
    const message = JSON.parse(messageJSON);
    const decrypted = crypt.decryptAES(symmetricKey, message);
    return JSON.parse(decrypted);
  } catch (e) {
    logger.error(`Failed to decrypt from ${resourceGroupId}: ${e}`);
    throw e;
  }
}
async function _getWorkspaceForDoc (doc) {
  const ancestors = await db.withAncestors(doc);
  return ancestors.find(d => d.type === db.workspace.type);
}
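/**
 * Create a ResourceGroup on the server, generating a fresh symmetric key
 * and encrypting it with the account's public key.
 */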
async function _createResourceGroup (name = '') {
  // Generate a symmetric key for the ResourceGroup
  const rgSymmetricJWK = await crypt.generateAES256Key();
  const rgSymmetricJWKStr = JSON.stringify(rgSymmetricJWK);

  // Encrypt the symmetric key with the Account's public key
  const publicJWK = session.getPublicKey();
  const encRGSymmetricJWK = crypt.encryptRSAWithJWK(publicJWK, rgSymmetricJWKStr);

  // Create the new ResourceGroup
  let resourceGroup;
  try {
    resourceGroup = await util.fetchPost('/api/resource_groups', {
      name,
      encSymmetricKey: encRGSymmetricJWK,
    });
  } catch (e) {
    logger.error(`Failed to create ResourceGroup: ${e}`);
    throw e;
  }

  // Create a config for it
  await createOrUpdateConfig(resourceGroup.id, store.SYNC_MODE_OFF);

  logger.debug(`Created ResourceGroup ${resourceGroup.id}`);
  return resourceGroup;
}
async function _createResource (doc, resourceGroupId) {
  return store.insertResource({
    id: doc._id,
    resourceGroupId: resourceGroupId,
    version: NO_VERSION,
    createdBy: session.getAccountId(),
    lastEdited: doc.modified,
    lastEditedBy: session.getAccountId(),
    removed: false,
    type: doc.type,
    encContent: await _encryptDoc(resourceGroupId, doc),
    dirty: true
  });
}
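/**
 * Create a Resource for a doc, first creating the parent Workspace's
 * Resource (and a ResourceGroup for it) if they don't exist yet.
 */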
async function _createResourceForDoc (doc) {
  // No resource yet, so create one
  const workspace = await _getWorkspaceForDoc(doc);

  if (!workspace) {
    throw new Error(`Could not find workspace for doc ${doc._id}`);
  }

  let workspaceResource = await store.getResourceById(workspace._id);

  if (!workspaceResource) {
    // TODO: Don't auto create a ResourceGroup
    const workspaceResourceGroup = await _createResourceGroup(workspace.name);
    workspaceResource = await _createResource(workspace, workspaceResourceGroup.id);
  }

  if (workspace === doc) {
    // If the current doc IS a Workspace, just return it
    return workspaceResource;
  } else {
    return await _createResource(doc, workspaceResource.resourceGroupId);
  }
}
export async function getOrCreateResourceForDoc (doc) {
  let resource = await store.getResourceById(doc._id);

  if (!resource) {
    resource = await _createResourceForDoc(doc);
  }

  return resource;
}
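/**
 * Return all active Resources, creating missing ones for any whitelisted
 * docs that aren't tracked yet.
 */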
async function _getOrCreateAllActiveResources (resourceGroupId = null) {
  const activeResourceMap = {};

  let activeResources;
  if (resourceGroupId) {
    activeResources = await store.activeResourcesForResourceGroup(resourceGroupId);
  } else {
    activeResources = await store.allActiveResources();
  }

  for (const r of activeResources) {
    activeResourceMap[r.id] = r;
  }

  // TODO: This is REALLY slow (relatively speaking)
  for (const type of Object.keys(WHITE_LIST)) {
    for (const doc of await db.all(type)) {
      const resource = await store.getResourceById(doc._id);
      if (!resource) {
        activeResourceMap[doc._id] = await _createResourceForDoc(doc);
      }
    }
  }

  return Object.keys(activeResourceMap).map(k => activeResourceMap[k]);
}
|